1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use unindent::Unindent as _;
18use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
19use worktree::WorktreeModelHandle as _;
20
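// Verifies that, with parking allowed, the test executor can wait on a channel fed by a real OS thread doing blocking work.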
21#[gpui::test]
22async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
23 cx.executor().allow_parking();
24
25 let (tx, mut rx) = futures::channel::mpsc::unbounded();
26 let _thread = std::thread::spawn(move || {
27 std::fs::metadata("/Users").unwrap();
28 std::thread::sleep(Duration::from_millis(1000));
29 tx.unbounded_send(1).unwrap();
30 });
31 rx.next().await.unwrap();
32}
33
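// Verifies that blocking work offloaded via smol::unblock can be awaited from a foreground task when parking is allowed.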
34#[gpui::test]
35async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let io_task = smol::unblock(move || {
39 println!("sleeping on thread {:?}", std::thread::current().id());
40 std::thread::sleep(Duration::from_millis(10));
41 1
42 });
43
44 let task = cx.foreground_executor().spawn(async move {
45 io_task.await;
46 });
47
48 task.await;
49}
50
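// Opens a project through a symlinked root and checks that symlinked directories resolve to the same inodes as their targets.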
51#[cfg(not(windows))]
52#[gpui::test]
53async fn test_symlinks(cx: &mut gpui::TestAppContext) {
54 init_test(cx);
55 cx.executor().allow_parking();
56
57 let dir = temp_tree(json!({
58 "root": {
59 "apple": "",
60 "banana": {
61 "carrot": {
62 "date": "",
63 "endive": "",
64 }
65 },
66 "fennel": {
67 "grape": "",
68 }
69 }
70 }));
71
72 let root_link_path = dir.path().join("root_link");
73 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
74 os::unix::fs::symlink(
75 &dir.path().join("root/fennel"),
76 &dir.path().join("root/finnochio"),
77 )
78 .unwrap();
79
80 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
81
82 project.update(cx, |project, cx| {
83 let tree = project.worktrees().next().unwrap().read(cx);
84 assert_eq!(tree.file_count(), 5);
85 assert_eq!(
86 tree.inode_for_path("fennel/grape"),
87 tree.inode_for_path("finnochio/grape")
88 );
89 });
90}
91
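// Checks that nested .zed/settings.json and .zed/tasks.json files apply per directory, overriding tab size and contributing additional tasks.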
92#[gpui::test]
93async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
94 init_test(cx);
95
96 let fs = FakeFs::new(cx.executor());
97 fs.insert_tree(
98 "/the-root",
99 json!({
100 ".zed": {
101 "settings.json": r#"{ "tab_size": 8 }"#,
102 "tasks.json": r#"[{
103 "label": "cargo check",
104 "command": "cargo",
105 "args": ["check", "--all"]
106 },]"#,
107 },
108 "a": {
109 "a.rs": "fn a() {\n A\n}"
110 },
111 "b": {
112 ".zed": {
113 "settings.json": r#"{ "tab_size": 2 }"#,
114 "tasks.json": r#"[{
115 "label": "cargo check",
116 "command": "cargo",
117 "args": ["check"]
118 },]"#,
119 },
120 "b.rs": "fn b() {\n B\n}"
121 }
122 }),
123 )
124 .await;
125
126 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
127 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
128
129 cx.executor().run_until_parked();
130 cx.update(|cx| {
131 let tree = worktree.read(cx);
132
133 let settings_a = language_settings(
134 None,
135 Some(
136 &(File::for_entry(
137 tree.entry_for_path("a/a.rs").unwrap().clone(),
138 worktree.clone(),
139 ) as _),
140 ),
141 cx,
142 );
143 let settings_b = language_settings(
144 None,
145 Some(
146 &(File::for_entry(
147 tree.entry_for_path("b/b.rs").unwrap().clone(),
148 worktree.clone(),
149 ) as _),
150 ),
151 cx,
152 );
153
154 assert_eq!(settings_a.tab_size.get(), 8);
155 assert_eq!(settings_b.tab_size.get(), 2);
156
        let worktree_id = project.update(cx, |project, cx| {
            project.worktrees().next().unwrap().read(cx).id()
        });
        let all_tasks = project
            .update(cx, |project, cx| {
                project.task_inventory().update(cx, |inventory, cx| {
                    inventory.list_tasks(None, None, false, cx)
                })
            })
            .into_iter()
            .map(|(source_kind, task)| (source_kind, task.name().to_string()))
            .collect::<Vec<_>>();
        assert_eq!(
            all_tasks,
            vec![
                (
                    TaskSourceKind::Worktree {
                        id: worktree_id,
                        abs_path: PathBuf::from("/the-root/.zed/tasks.json")
                    },
                    "cargo check".to_string()
                ),
                (
                    TaskSourceKind::Worktree {
                        id: worktree_id,
                        abs_path: PathBuf::from("/the-root/b/.zed/tasks.json")
                    },
                    "cargo check".to_string()
                ),
            ]
        );
188 });
189}
190
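// Exercises the language server lifecycle: servers are started per language and receive open, change,
// save, rename, and close notifications only for buffers in their language, including across restarts.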
191#[gpui::test]
192async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
193 init_test(cx);
194
195 let fs = FakeFs::new(cx.executor());
196 fs.insert_tree(
197 "/the-root",
198 json!({
199 "test.rs": "const A: i32 = 1;",
200 "test2.rs": "",
201 "Cargo.toml": "a = 1",
202 "package.json": "{\"a\": 1}",
203 }),
204 )
205 .await;
206
207 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
208 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
209
210 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
211 "Rust",
212 FakeLspAdapter {
213 name: "the-rust-language-server",
214 capabilities: lsp::ServerCapabilities {
215 completion_provider: Some(lsp::CompletionOptions {
216 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
217 ..Default::default()
218 }),
219 ..Default::default()
220 },
221 ..Default::default()
222 },
223 );
224 let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
225 "JSON",
226 FakeLspAdapter {
227 name: "the-json-language-server",
228 capabilities: lsp::ServerCapabilities {
229 completion_provider: Some(lsp::CompletionOptions {
230 trigger_characters: Some(vec![":".to_string()]),
231 ..Default::default()
232 }),
233 ..Default::default()
234 },
235 ..Default::default()
236 },
237 );
238
239 // Open a buffer without an associated language server.
240 let toml_buffer = project
241 .update(cx, |project, cx| {
242 project.open_local_buffer("/the-root/Cargo.toml", cx)
243 })
244 .await
245 .unwrap();
246
247 // Open a buffer with an associated language server before the language for it has been loaded.
248 let rust_buffer = project
249 .update(cx, |project, cx| {
250 project.open_local_buffer("/the-root/test.rs", cx)
251 })
252 .await
253 .unwrap();
254 rust_buffer.update(cx, |buffer, _| {
255 assert_eq!(buffer.language().map(|l| l.name()), None);
256 });
257
258 // Now we add the languages to the project, and ensure they get assigned to all
259 // the relevant open buffers.
260 language_registry.add(json_lang());
261 language_registry.add(rust_lang());
262 cx.executor().run_until_parked();
263 rust_buffer.update(cx, |buffer, _| {
264 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
265 });
266
267 // A server is started up, and it is notified about Rust files.
268 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
269 assert_eq!(
270 fake_rust_server
271 .receive_notification::<lsp::notification::DidOpenTextDocument>()
272 .await
273 .text_document,
274 lsp::TextDocumentItem {
275 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
276 version: 0,
277 text: "const A: i32 = 1;".to_string(),
278 language_id: Default::default()
279 }
280 );
281
282 // The buffer is configured based on the language server's capabilities.
283 rust_buffer.update(cx, |buffer, _| {
284 assert_eq!(
285 buffer.completion_triggers(),
286 &[".".to_string(), "::".to_string()]
287 );
288 });
289 toml_buffer.update(cx, |buffer, _| {
290 assert!(buffer.completion_triggers().is_empty());
291 });
292
293 // Edit a buffer. The changes are reported to the language server.
294 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
295 assert_eq!(
296 fake_rust_server
297 .receive_notification::<lsp::notification::DidChangeTextDocument>()
298 .await
299 .text_document,
300 lsp::VersionedTextDocumentIdentifier::new(
301 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
302 1
303 )
304 );
305
306 // Open a third buffer with a different associated language server.
307 let json_buffer = project
308 .update(cx, |project, cx| {
309 project.open_local_buffer("/the-root/package.json", cx)
310 })
311 .await
312 .unwrap();
313
314 // A json language server is started up and is only notified about the json buffer.
315 let mut fake_json_server = fake_json_servers.next().await.unwrap();
316 assert_eq!(
317 fake_json_server
318 .receive_notification::<lsp::notification::DidOpenTextDocument>()
319 .await
320 .text_document,
321 lsp::TextDocumentItem {
322 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
323 version: 0,
324 text: "{\"a\": 1}".to_string(),
325 language_id: Default::default()
326 }
327 );
328
329 // This buffer is configured based on the second language server's
330 // capabilities.
331 json_buffer.update(cx, |buffer, _| {
332 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
333 });
334
335 // When opening another buffer whose language server is already running,
336 // it is also configured based on the existing language server's capabilities.
337 let rust_buffer2 = project
338 .update(cx, |project, cx| {
339 project.open_local_buffer("/the-root/test2.rs", cx)
340 })
341 .await
342 .unwrap();
343 rust_buffer2.update(cx, |buffer, _| {
344 assert_eq!(
345 buffer.completion_triggers(),
346 &[".".to_string(), "::".to_string()]
347 );
348 });
349
350 // Changes are reported only to servers matching the buffer's language.
351 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
352 rust_buffer2.update(cx, |buffer, cx| {
353 buffer.edit([(0..0, "let x = 1;")], None, cx)
354 });
355 assert_eq!(
356 fake_rust_server
357 .receive_notification::<lsp::notification::DidChangeTextDocument>()
358 .await
359 .text_document,
360 lsp::VersionedTextDocumentIdentifier::new(
361 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
362 1
363 )
364 );
365
366 // Save notifications are reported to all servers.
367 project
368 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
369 .await
370 .unwrap();
371 assert_eq!(
372 fake_rust_server
373 .receive_notification::<lsp::notification::DidSaveTextDocument>()
374 .await
375 .text_document,
376 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
377 );
378 assert_eq!(
379 fake_json_server
380 .receive_notification::<lsp::notification::DidSaveTextDocument>()
381 .await
382 .text_document,
383 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
384 );
385
386 // Renames are reported only to servers matching the buffer's language.
387 fs.rename(
388 Path::new("/the-root/test2.rs"),
389 Path::new("/the-root/test3.rs"),
390 Default::default(),
391 )
392 .await
393 .unwrap();
394 assert_eq!(
395 fake_rust_server
396 .receive_notification::<lsp::notification::DidCloseTextDocument>()
397 .await
398 .text_document,
399 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
400 );
401 assert_eq!(
402 fake_rust_server
403 .receive_notification::<lsp::notification::DidOpenTextDocument>()
404 .await
405 .text_document,
406 lsp::TextDocumentItem {
407 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
408 version: 0,
409 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
410 language_id: Default::default()
411 },
412 );
413
414 rust_buffer2.update(cx, |buffer, cx| {
415 buffer.update_diagnostics(
416 LanguageServerId(0),
417 DiagnosticSet::from_sorted_entries(
418 vec![DiagnosticEntry {
419 diagnostic: Default::default(),
420 range: Anchor::MIN..Anchor::MAX,
421 }],
422 &buffer.snapshot(),
423 ),
424 cx,
425 );
426 assert_eq!(
427 buffer
428 .snapshot()
429 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
430 .count(),
431 1
432 );
433 });
434
435 // When the rename changes the extension of the file, the buffer gets closed on the old
436 // language server and gets opened on the new one.
437 fs.rename(
438 Path::new("/the-root/test3.rs"),
439 Path::new("/the-root/test3.json"),
440 Default::default(),
441 )
442 .await
443 .unwrap();
444 assert_eq!(
445 fake_rust_server
446 .receive_notification::<lsp::notification::DidCloseTextDocument>()
447 .await
448 .text_document,
449 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
450 );
451 assert_eq!(
452 fake_json_server
453 .receive_notification::<lsp::notification::DidOpenTextDocument>()
454 .await
455 .text_document,
456 lsp::TextDocumentItem {
457 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
458 version: 0,
459 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
460 language_id: Default::default()
461 },
462 );
463
464 // We clear the diagnostics, since the language has changed.
465 rust_buffer2.update(cx, |buffer, _| {
466 assert_eq!(
467 buffer
468 .snapshot()
469 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
470 .count(),
471 0
472 );
473 });
474
475 // The renamed file's version resets after changing language server.
476 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
477 assert_eq!(
478 fake_json_server
479 .receive_notification::<lsp::notification::DidChangeTextDocument>()
480 .await
481 .text_document,
482 lsp::VersionedTextDocumentIdentifier::new(
483 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
484 1
485 )
486 );
487
488 // Restart language servers
489 project.update(cx, |project, cx| {
490 project.restart_language_servers_for_buffers(
491 vec![rust_buffer.clone(), json_buffer.clone()],
492 cx,
493 );
494 });
495
496 let mut rust_shutdown_requests = fake_rust_server
497 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
498 let mut json_shutdown_requests = fake_json_server
499 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
500 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
501
502 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
503 let mut fake_json_server = fake_json_servers.next().await.unwrap();
504
505 // Ensure rust document is reopened in new rust language server
506 assert_eq!(
507 fake_rust_server
508 .receive_notification::<lsp::notification::DidOpenTextDocument>()
509 .await
510 .text_document,
511 lsp::TextDocumentItem {
512 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
513 version: 0,
514 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
515 language_id: Default::default()
516 }
517 );
518
519 // Ensure json documents are reopened in new json language server
520 assert_set_eq!(
521 [
522 fake_json_server
523 .receive_notification::<lsp::notification::DidOpenTextDocument>()
524 .await
525 .text_document,
526 fake_json_server
527 .receive_notification::<lsp::notification::DidOpenTextDocument>()
528 .await
529 .text_document,
530 ],
531 [
532 lsp::TextDocumentItem {
533 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
534 version: 0,
535 text: json_buffer.update(cx, |buffer, _| buffer.text()),
536 language_id: Default::default()
537 },
538 lsp::TextDocumentItem {
539 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
540 version: 0,
541 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
542 language_id: Default::default()
543 }
544 ]
545 );
546
547 // Close notifications are reported only to servers matching the buffer's language.
548 cx.update(|_| drop(json_buffer));
549 let close_message = lsp::DidCloseTextDocumentParams {
550 text_document: lsp::TextDocumentIdentifier::new(
551 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
552 ),
553 };
554 assert_eq!(
555 fake_json_server
556 .receive_notification::<lsp::notification::DidCloseTextDocument>()
557 .await,
558 close_message,
559 );
560}
561
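// Checks that file system events are forwarded to a language server according to its registered watch patterns, including paths inside ignored directories.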
562#[gpui::test]
563async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
564 init_test(cx);
565
566 let fs = FakeFs::new(cx.executor());
567 fs.insert_tree(
568 "/the-root",
569 json!({
570 ".gitignore": "target\n",
571 "src": {
572 "a.rs": "",
573 "b.rs": "",
574 },
575 "target": {
576 "x": {
577 "out": {
578 "x.rs": ""
579 }
580 },
581 "y": {
582 "out": {
583 "y.rs": "",
584 }
585 },
586 "z": {
587 "out": {
588 "z.rs": ""
589 }
590 }
591 }
592 }),
593 )
594 .await;
595
596 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
597 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
598 language_registry.add(rust_lang());
599 let mut fake_servers = language_registry.register_fake_lsp_adapter(
600 "Rust",
601 FakeLspAdapter {
602 name: "the-language-server",
603 ..Default::default()
604 },
605 );
606
607 cx.executor().run_until_parked();
608
609 // Start the language server by opening a buffer with a compatible file extension.
610 let _buffer = project
611 .update(cx, |project, cx| {
612 project.open_local_buffer("/the-root/src/a.rs", cx)
613 })
614 .await
615 .unwrap();
616
617 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
618 project.update(cx, |project, cx| {
619 let worktree = project.worktrees().next().unwrap();
620 assert_eq!(
621 worktree
622 .read(cx)
623 .snapshot()
624 .entries(true)
625 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
626 .collect::<Vec<_>>(),
627 &[
628 (Path::new(""), false),
629 (Path::new(".gitignore"), false),
630 (Path::new("src"), false),
631 (Path::new("src/a.rs"), false),
632 (Path::new("src/b.rs"), false),
633 (Path::new("target"), true),
634 ]
635 );
636 });
637
638 let prev_read_dir_count = fs.read_dir_call_count();
639
640 // Keep track of the FS events reported to the language server.
641 let fake_server = fake_servers.next().await.unwrap();
642 let file_changes = Arc::new(Mutex::new(Vec::new()));
643 fake_server
644 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
645 registrations: vec![lsp::Registration {
646 id: Default::default(),
647 method: "workspace/didChangeWatchedFiles".to_string(),
648 register_options: serde_json::to_value(
649 lsp::DidChangeWatchedFilesRegistrationOptions {
650 watchers: vec![
651 lsp::FileSystemWatcher {
652 glob_pattern: lsp::GlobPattern::String(
653 "/the-root/Cargo.toml".to_string(),
654 ),
655 kind: None,
656 },
657 lsp::FileSystemWatcher {
658 glob_pattern: lsp::GlobPattern::String(
659 "/the-root/src/*.{rs,c}".to_string(),
660 ),
661 kind: None,
662 },
663 lsp::FileSystemWatcher {
664 glob_pattern: lsp::GlobPattern::String(
665 "/the-root/target/y/**/*.rs".to_string(),
666 ),
667 kind: None,
668 },
669 ],
670 },
671 )
672 .ok(),
673 }],
674 })
675 .await
676 .unwrap();
677 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
678 let file_changes = file_changes.clone();
679 move |params, _| {
680 let mut file_changes = file_changes.lock();
681 file_changes.extend(params.changes);
682 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
683 }
684 });
685
686 cx.executor().run_until_parked();
687 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
688 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
689
690 // Now the language server has asked us to watch an ignored directory path,
691 // so we recursively load it.
692 project.update(cx, |project, cx| {
693 let worktree = project.worktrees().next().unwrap();
694 assert_eq!(
695 worktree
696 .read(cx)
697 .snapshot()
698 .entries(true)
699 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
700 .collect::<Vec<_>>(),
701 &[
702 (Path::new(""), false),
703 (Path::new(".gitignore"), false),
704 (Path::new("src"), false),
705 (Path::new("src/a.rs"), false),
706 (Path::new("src/b.rs"), false),
707 (Path::new("target"), true),
708 (Path::new("target/x"), true),
709 (Path::new("target/y"), true),
710 (Path::new("target/y/out"), true),
711 (Path::new("target/y/out/y.rs"), true),
712 (Path::new("target/z"), true),
713 ]
714 );
715 });
716
717 // Perform some file system mutations, two of which match the watched patterns,
718 // and one of which does not.
719 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
720 .await
721 .unwrap();
722 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
723 .await
724 .unwrap();
725 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
726 .await
727 .unwrap();
728 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
729 .await
730 .unwrap();
731 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
732 .await
733 .unwrap();
734
735 // The language server receives events for the FS mutations that match its watch patterns.
736 cx.executor().run_until_parked();
737 assert_eq!(
738 &*file_changes.lock(),
739 &[
740 lsp::FileEvent {
741 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
742 typ: lsp::FileChangeType::DELETED,
743 },
744 lsp::FileEvent {
745 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
746 typ: lsp::FileChangeType::CREATED,
747 },
748 lsp::FileEvent {
749 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
750 typ: lsp::FileChangeType::CREATED,
751 },
752 ]
753 );
754}
755
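// Checks that diagnostics are routed to the correct buffer when each file is opened as its own single-file worktree.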
756#[gpui::test]
757async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
758 init_test(cx);
759
760 let fs = FakeFs::new(cx.executor());
761 fs.insert_tree(
762 "/dir",
763 json!({
764 "a.rs": "let a = 1;",
765 "b.rs": "let b = 2;"
766 }),
767 )
768 .await;
769
770 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
771
772 let buffer_a = project
773 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
774 .await
775 .unwrap();
776 let buffer_b = project
777 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
778 .await
779 .unwrap();
780
781 project.update(cx, |project, cx| {
782 project
783 .update_diagnostics(
784 LanguageServerId(0),
785 lsp::PublishDiagnosticsParams {
786 uri: Url::from_file_path("/dir/a.rs").unwrap(),
787 version: None,
788 diagnostics: vec![lsp::Diagnostic {
789 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
790 severity: Some(lsp::DiagnosticSeverity::ERROR),
791 message: "error 1".to_string(),
792 ..Default::default()
793 }],
794 },
795 &[],
796 cx,
797 )
798 .unwrap();
799 project
800 .update_diagnostics(
801 LanguageServerId(0),
802 lsp::PublishDiagnosticsParams {
803 uri: Url::from_file_path("/dir/b.rs").unwrap(),
804 version: None,
805 diagnostics: vec![lsp::Diagnostic {
806 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
807 severity: Some(lsp::DiagnosticSeverity::WARNING),
808 message: "error 2".to_string(),
809 ..Default::default()
810 }],
811 },
812 &[],
813 cx,
814 )
815 .unwrap();
816 });
817
818 buffer_a.update(cx, |buffer, _| {
819 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
820 assert_eq!(
821 chunks
822 .iter()
823 .map(|(s, d)| (s.as_str(), *d))
824 .collect::<Vec<_>>(),
825 &[
826 ("let ", None),
827 ("a", Some(DiagnosticSeverity::ERROR)),
828 (" = 1;", None),
829 ]
830 );
831 });
832 buffer_b.update(cx, |buffer, _| {
833 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
834 assert_eq!(
835 chunks
836 .iter()
837 .map(|(s, d)| (s.as_str(), *d))
838 .collect::<Vec<_>>(),
839 &[
840 ("let ", None),
841 ("b", Some(DiagnosticSeverity::WARNING)),
842 (" = 2;", None),
843 ]
844 );
845 });
846}
847
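// Checks that gitignored and hidden-worktree buffers still show in-buffer diagnostics, while
// project-wide summaries omit them unless ignored entries are explicitly requested.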
848#[gpui::test]
849async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
850 init_test(cx);
851
852 let fs = FakeFs::new(cx.executor());
853 fs.insert_tree(
854 "/root",
855 json!({
856 "dir": {
857 ".git": {
858 "HEAD": "ref: refs/heads/main",
859 },
860 ".gitignore": "b.rs",
861 "a.rs": "let a = 1;",
862 "b.rs": "let b = 2;",
863 },
864 "other.rs": "let b = c;"
865 }),
866 )
867 .await;
868
869 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
870 let (worktree, _) = project
871 .update(cx, |project, cx| {
872 project.find_or_create_local_worktree("/root/dir", true, cx)
873 })
874 .await
875 .unwrap();
876 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
877
878 let (worktree, _) = project
879 .update(cx, |project, cx| {
880 project.find_or_create_local_worktree("/root/other.rs", false, cx)
881 })
882 .await
883 .unwrap();
884 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
885
886 let server_id = LanguageServerId(0);
887 project.update(cx, |project, cx| {
888 project
889 .update_diagnostics(
890 server_id,
891 lsp::PublishDiagnosticsParams {
892 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
893 version: None,
894 diagnostics: vec![lsp::Diagnostic {
895 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
896 severity: Some(lsp::DiagnosticSeverity::ERROR),
897 message: "unused variable 'b'".to_string(),
898 ..Default::default()
899 }],
900 },
901 &[],
902 cx,
903 )
904 .unwrap();
905 project
906 .update_diagnostics(
907 server_id,
908 lsp::PublishDiagnosticsParams {
909 uri: Url::from_file_path("/root/other.rs").unwrap(),
910 version: None,
911 diagnostics: vec![lsp::Diagnostic {
912 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
913 severity: Some(lsp::DiagnosticSeverity::ERROR),
914 message: "unknown variable 'c'".to_string(),
915 ..Default::default()
916 }],
917 },
918 &[],
919 cx,
920 )
921 .unwrap();
922 });
923
924 let main_ignored_buffer = project
925 .update(cx, |project, cx| {
926 project.open_buffer((main_worktree_id, "b.rs"), cx)
927 })
928 .await
929 .unwrap();
930 main_ignored_buffer.update(cx, |buffer, _| {
931 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
932 assert_eq!(
933 chunks
934 .iter()
935 .map(|(s, d)| (s.as_str(), *d))
936 .collect::<Vec<_>>(),
937 &[
938 ("let ", None),
939 ("b", Some(DiagnosticSeverity::ERROR)),
940 (" = 2;", None),
941 ],
942 "Gigitnored buffers should still get in-buffer diagnostics",
943 );
944 });
945 let other_buffer = project
946 .update(cx, |project, cx| {
947 project.open_buffer((other_worktree_id, ""), cx)
948 })
949 .await
950 .unwrap();
951 other_buffer.update(cx, |buffer, _| {
952 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
953 assert_eq!(
954 chunks
955 .iter()
956 .map(|(s, d)| (s.as_str(), *d))
957 .collect::<Vec<_>>(),
958 &[
959 ("let b = ", None),
960 ("c", Some(DiagnosticSeverity::ERROR)),
961 (";", None),
962 ],
963 "Buffers from hidden projects should still get in-buffer diagnostics"
964 );
965 });
966
967 project.update(cx, |project, cx| {
968 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
969 assert_eq!(
970 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
971 vec![(
972 ProjectPath {
973 worktree_id: main_worktree_id,
974 path: Arc::from(Path::new("b.rs")),
975 },
976 server_id,
977 DiagnosticSummary {
978 error_count: 1,
979 warning_count: 0,
980 }
981 )]
982 );
983 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
984 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
985 });
986}
987
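// Checks that disk-based diagnostic progress produces started, updated, and finished events, and that repeated empty publishes do not emit redundant updates.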
988#[gpui::test]
989async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
990 init_test(cx);
991
992 let progress_token = "the-progress-token";
993
994 let fs = FakeFs::new(cx.executor());
995 fs.insert_tree(
996 "/dir",
997 json!({
998 "a.rs": "fn a() { A }",
999 "b.rs": "const y: i32 = 1",
1000 }),
1001 )
1002 .await;
1003
1004 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1005 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1006
1007 language_registry.add(rust_lang());
1008 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1009 "Rust",
1010 FakeLspAdapter {
1011 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1012 disk_based_diagnostics_sources: vec!["disk".into()],
1013 ..Default::default()
1014 },
1015 );
1016
1017 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1018
1019 // Cause worktree to start the fake language server
1020 let _buffer = project
1021 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1022 .await
1023 .unwrap();
1024
1025 let mut events = cx.events(&project);
1026
1027 let fake_server = fake_servers.next().await.unwrap();
1028 assert_eq!(
1029 events.next().await.unwrap(),
1030 Event::LanguageServerAdded(LanguageServerId(0)),
1031 );
1032
1033 fake_server
1034 .start_progress(format!("{}/0", progress_token))
1035 .await;
1036 assert_eq!(
1037 events.next().await.unwrap(),
1038 Event::DiskBasedDiagnosticsStarted {
1039 language_server_id: LanguageServerId(0),
1040 }
1041 );
1042
1043 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1044 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1045 version: None,
1046 diagnostics: vec![lsp::Diagnostic {
1047 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1048 severity: Some(lsp::DiagnosticSeverity::ERROR),
1049 message: "undefined variable 'A'".to_string(),
1050 ..Default::default()
1051 }],
1052 });
1053 assert_eq!(
1054 events.next().await.unwrap(),
1055 Event::DiagnosticsUpdated {
1056 language_server_id: LanguageServerId(0),
1057 path: (worktree_id, Path::new("a.rs")).into()
1058 }
1059 );
1060
1061 fake_server.end_progress(format!("{}/0", progress_token));
1062 assert_eq!(
1063 events.next().await.unwrap(),
1064 Event::DiskBasedDiagnosticsFinished {
1065 language_server_id: LanguageServerId(0)
1066 }
1067 );
1068
1069 let buffer = project
1070 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1071 .await
1072 .unwrap();
1073
1074 buffer.update(cx, |buffer, _| {
1075 let snapshot = buffer.snapshot();
1076 let diagnostics = snapshot
1077 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1078 .collect::<Vec<_>>();
1079 assert_eq!(
1080 diagnostics,
1081 &[DiagnosticEntry {
1082 range: Point::new(0, 9)..Point::new(0, 10),
1083 diagnostic: Diagnostic {
1084 severity: lsp::DiagnosticSeverity::ERROR,
1085 message: "undefined variable 'A'".to_string(),
1086 group_id: 0,
1087 is_primary: true,
1088 ..Default::default()
1089 }
1090 }]
1091 )
1092 });
1093
1094 // Ensure publishing empty diagnostics twice only results in one update event.
1095 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1096 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1097 version: None,
1098 diagnostics: Default::default(),
1099 });
1100 assert_eq!(
1101 events.next().await.unwrap(),
1102 Event::DiagnosticsUpdated {
1103 language_server_id: LanguageServerId(0),
1104 path: (worktree_id, Path::new("a.rs")).into()
1105 }
1106 );
1107
1108 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1109 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1110 version: None,
1111 diagnostics: Default::default(),
1112 });
1113 cx.executor().run_until_parked();
1114 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1115}
1116
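// Checks that restarting a language server while its disk-based diagnostics are still in progress does not leave the old server's unfinished progress dangling.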
1117#[gpui::test]
1118async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1119 init_test(cx);
1120
1121 let progress_token = "the-progress-token";
1122
1123 let fs = FakeFs::new(cx.executor());
1124 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1125
1126 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1127
1128 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1129 language_registry.add(rust_lang());
1130 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1131 "Rust",
1132 FakeLspAdapter {
1133 name: "the-language-server",
1134 disk_based_diagnostics_sources: vec!["disk".into()],
1135 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1136 ..Default::default()
1137 },
1138 );
1139
1140 let buffer = project
1141 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1142 .await
1143 .unwrap();
1144
1145 // Simulate diagnostics starting to update.
1146 let fake_server = fake_servers.next().await.unwrap();
1147 fake_server.start_progress(progress_token).await;
1148
1149 // Restart the server before the diagnostics finish updating.
1150 project.update(cx, |project, cx| {
1151 project.restart_language_servers_for_buffers([buffer], cx);
1152 });
1153 let mut events = cx.events(&project);
1154
1155 // Simulate the newly started server sending more diagnostics.
1156 let fake_server = fake_servers.next().await.unwrap();
1157 assert_eq!(
1158 events.next().await.unwrap(),
1159 Event::LanguageServerAdded(LanguageServerId(1))
1160 );
1161 fake_server.start_progress(progress_token).await;
1162 assert_eq!(
1163 events.next().await.unwrap(),
1164 Event::DiskBasedDiagnosticsStarted {
1165 language_server_id: LanguageServerId(1)
1166 }
1167 );
1168 project.update(cx, |project, _| {
1169 assert_eq!(
1170 project
1171 .language_servers_running_disk_based_diagnostics()
1172 .collect::<Vec<_>>(),
1173 [LanguageServerId(1)]
1174 );
1175 });
1176
1177 // All diagnostics are considered done, despite the old server's diagnostic
1178 // task never completing.
1179 fake_server.end_progress(progress_token);
1180 assert_eq!(
1181 events.next().await.unwrap(),
1182 Event::DiskBasedDiagnosticsFinished {
1183 language_server_id: LanguageServerId(1)
1184 }
1185 );
1186 project.update(cx, |project, _| {
1187 assert_eq!(
1188 project
1189 .language_servers_running_disk_based_diagnostics()
1190 .collect::<Vec<_>>(),
1191 [LanguageServerId(0); 0]
1192 );
1193 });
1194}
1195
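// Checks that restarting a language server clears the diagnostics it previously published.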
1196#[gpui::test]
1197async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1198 init_test(cx);
1199
1200 let fs = FakeFs::new(cx.executor());
1201 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1202
1203 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1204
1205 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1206 language_registry.add(rust_lang());
1207 let mut fake_servers =
1208 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1209
1210 let buffer = project
1211 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1212 .await
1213 .unwrap();
1214
1215 // Publish diagnostics
1216 let fake_server = fake_servers.next().await.unwrap();
1217 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1218 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1219 version: None,
1220 diagnostics: vec![lsp::Diagnostic {
1221 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1222 severity: Some(lsp::DiagnosticSeverity::ERROR),
1223 message: "the message".to_string(),
1224 ..Default::default()
1225 }],
1226 });
1227
1228 cx.executor().run_until_parked();
1229 buffer.update(cx, |buffer, _| {
1230 assert_eq!(
1231 buffer
1232 .snapshot()
1233 .diagnostics_in_range::<_, usize>(0..1, false)
1234 .map(|entry| entry.diagnostic.message.clone())
1235 .collect::<Vec<_>>(),
1236 ["the message".to_string()]
1237 );
1238 });
1239 project.update(cx, |project, cx| {
1240 assert_eq!(
1241 project.diagnostic_summary(false, cx),
1242 DiagnosticSummary {
1243 error_count: 1,
1244 warning_count: 0,
1245 }
1246 );
1247 });
1248
1249 project.update(cx, |project, cx| {
1250 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1251 });
1252
1253 // The diagnostics are cleared.
1254 cx.executor().run_until_parked();
1255 buffer.update(cx, |buffer, _| {
1256 assert_eq!(
1257 buffer
1258 .snapshot()
1259 .diagnostics_in_range::<_, usize>(0..1, false)
1260 .map(|entry| entry.diagnostic.message.clone())
1261 .collect::<Vec<_>>(),
1262 Vec::<String>::new(),
1263 );
1264 });
1265 project.update(cx, |project, cx| {
1266 assert_eq!(
1267 project.diagnostic_summary(false, cx),
1268 DiagnosticSummary {
1269 error_count: 0,
1270 warning_count: 0,
1271 }
1272 );
1273 });
1274}
1275
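// Checks that diagnostics referencing an unknown buffer version do not prevent the buffer from being reopened at version 0 after a server restart.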
1276#[gpui::test]
1277async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1278 init_test(cx);
1279
1280 let fs = FakeFs::new(cx.executor());
1281 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1282
1283 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1284 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1285
1286 language_registry.add(rust_lang());
1287 let mut fake_servers =
1288 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1289
1290 let buffer = project
1291 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1292 .await
1293 .unwrap();
1294
1295 // Before restarting the server, report diagnostics with an unknown buffer version.
1296 let fake_server = fake_servers.next().await.unwrap();
1297 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1298 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1299 version: Some(10000),
1300 diagnostics: Vec::new(),
1301 });
1302 cx.executor().run_until_parked();
1303
1304 project.update(cx, |project, cx| {
1305 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1306 });
1307 let mut fake_server = fake_servers.next().await.unwrap();
1308 let notification = fake_server
1309 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1310 .await
1311 .text_document;
1312 assert_eq!(notification.version, 0);
1313}
1314
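// Checks that toggling enable_language_server per language stops and restarts only the affected servers.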
1315#[gpui::test]
1316async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1317 init_test(cx);
1318
1319 let fs = FakeFs::new(cx.executor());
1320 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1321 .await;
1322
1323 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1324 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1325
1326 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
1327 "Rust",
1328 FakeLspAdapter {
1329 name: "rust-lsp",
1330 ..Default::default()
1331 },
1332 );
1333 let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
1334 "JavaScript",
1335 FakeLspAdapter {
1336 name: "js-lsp",
1337 ..Default::default()
1338 },
1339 );
1340 language_registry.add(rust_lang());
1341 language_registry.add(js_lang());
1342
1343 let _rs_buffer = project
1344 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1345 .await
1346 .unwrap();
1347 let _js_buffer = project
1348 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1349 .await
1350 .unwrap();
1351
1352 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1353 assert_eq!(
1354 fake_rust_server_1
1355 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1356 .await
1357 .text_document
1358 .uri
1359 .as_str(),
1360 "file:///dir/a.rs"
1361 );
1362
1363 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1364 assert_eq!(
1365 fake_js_server
1366 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1367 .await
1368 .text_document
1369 .uri
1370 .as_str(),
1371 "file:///dir/b.js"
1372 );
1373
1374 // Disable Rust language server, ensuring only that server gets stopped.
1375 cx.update(|cx| {
1376 cx.update_global(|settings: &mut SettingsStore, cx| {
1377 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1378 settings.languages.insert(
1379 Arc::from("Rust"),
1380 LanguageSettingsContent {
1381 enable_language_server: Some(false),
1382 ..Default::default()
1383 },
1384 );
1385 });
1386 })
1387 });
1388 fake_rust_server_1
1389 .receive_notification::<lsp::notification::Exit>()
1390 .await;
1391
1392 // Enable Rust and disable JavaScript language servers, ensuring that the
1393 // former gets started again and that the latter stops.
1394 cx.update(|cx| {
1395 cx.update_global(|settings: &mut SettingsStore, cx| {
1396 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1397 settings.languages.insert(
1398 Arc::from("Rust"),
1399 LanguageSettingsContent {
1400 enable_language_server: Some(true),
1401 ..Default::default()
1402 },
1403 );
1404 settings.languages.insert(
1405 Arc::from("JavaScript"),
1406 LanguageSettingsContent {
1407 enable_language_server: Some(false),
1408 ..Default::default()
1409 },
1410 );
1411 });
1412 })
1413 });
1414 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1415 assert_eq!(
1416 fake_rust_server_2
1417 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1418 .await
1419 .text_document
1420 .uri
1421 .as_str(),
1422 "file:///dir/a.rs"
1423 );
1424 fake_js_server
1425 .receive_notification::<lsp::notification::Exit>()
1426 .await;
1427}
1428
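// Checks that diagnostics published against older buffer versions are translated through subsequent edits, including overlapping and out-of-order ranges.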
1429#[gpui::test(iterations = 3)]
1430async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1431 init_test(cx);
1432
1433 let text = "
1434 fn a() { A }
1435 fn b() { BB }
1436 fn c() { CCC }
1437 "
1438 .unindent();
1439
1440 let fs = FakeFs::new(cx.executor());
1441 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1442
1443 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1444 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1445
1446 language_registry.add(rust_lang());
1447 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1448 "Rust",
1449 FakeLspAdapter {
1450 disk_based_diagnostics_sources: vec!["disk".into()],
1451 ..Default::default()
1452 },
1453 );
1454
1455 let buffer = project
1456 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1457 .await
1458 .unwrap();
1459
1460 let mut fake_server = fake_servers.next().await.unwrap();
1461 let open_notification = fake_server
1462 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1463 .await;
1464
1465 // Edit the buffer, moving the content down
1466 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1467 let change_notification_1 = fake_server
1468 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1469 .await;
1470 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1471
1472 // Report some diagnostics for the initial version of the buffer
1473 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1474 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1475 version: Some(open_notification.text_document.version),
1476 diagnostics: vec![
1477 lsp::Diagnostic {
1478 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1479 severity: Some(DiagnosticSeverity::ERROR),
1480 message: "undefined variable 'A'".to_string(),
1481 source: Some("disk".to_string()),
1482 ..Default::default()
1483 },
1484 lsp::Diagnostic {
1485 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1486 severity: Some(DiagnosticSeverity::ERROR),
1487 message: "undefined variable 'BB'".to_string(),
1488 source: Some("disk".to_string()),
1489 ..Default::default()
1490 },
1491 lsp::Diagnostic {
1492 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1493 severity: Some(DiagnosticSeverity::ERROR),
1494 source: Some("disk".to_string()),
1495 message: "undefined variable 'CCC'".to_string(),
1496 ..Default::default()
1497 },
1498 ],
1499 });
1500
1501 // The diagnostics have moved down since they were created.
1502 cx.executor().run_until_parked();
1503 buffer.update(cx, |buffer, _| {
1504 assert_eq!(
1505 buffer
1506 .snapshot()
1507 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1508 .collect::<Vec<_>>(),
1509 &[
1510 DiagnosticEntry {
1511 range: Point::new(3, 9)..Point::new(3, 11),
1512 diagnostic: Diagnostic {
1513 source: Some("disk".into()),
1514 severity: DiagnosticSeverity::ERROR,
1515 message: "undefined variable 'BB'".to_string(),
1516 is_disk_based: true,
1517 group_id: 1,
1518 is_primary: true,
1519 ..Default::default()
1520 },
1521 },
1522 DiagnosticEntry {
1523 range: Point::new(4, 9)..Point::new(4, 12),
1524 diagnostic: Diagnostic {
1525 source: Some("disk".into()),
1526 severity: DiagnosticSeverity::ERROR,
1527 message: "undefined variable 'CCC'".to_string(),
1528 is_disk_based: true,
1529 group_id: 2,
1530 is_primary: true,
1531 ..Default::default()
1532 }
1533 }
1534 ]
1535 );
1536 assert_eq!(
1537 chunks_with_diagnostics(buffer, 0..buffer.len()),
1538 [
1539 ("\n\nfn a() { ".to_string(), None),
1540 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1541 (" }\nfn b() { ".to_string(), None),
1542 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1543 (" }\nfn c() { ".to_string(), None),
1544 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1545 (" }\n".to_string(), None),
1546 ]
1547 );
1548 assert_eq!(
1549 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1550 [
1551 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1552 (" }\nfn c() { ".to_string(), None),
1553 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1554 ]
1555 );
1556 });
1557
1558 // Ensure overlapping diagnostics are highlighted correctly.
1559 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1560 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1561 version: Some(open_notification.text_document.version),
1562 diagnostics: vec![
1563 lsp::Diagnostic {
1564 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1565 severity: Some(DiagnosticSeverity::ERROR),
1566 message: "undefined variable 'A'".to_string(),
1567 source: Some("disk".to_string()),
1568 ..Default::default()
1569 },
1570 lsp::Diagnostic {
1571 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1572 severity: Some(DiagnosticSeverity::WARNING),
1573 message: "unreachable statement".to_string(),
1574 source: Some("disk".to_string()),
1575 ..Default::default()
1576 },
1577 ],
1578 });
1579
1580 cx.executor().run_until_parked();
1581 buffer.update(cx, |buffer, _| {
1582 assert_eq!(
1583 buffer
1584 .snapshot()
1585 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1586 .collect::<Vec<_>>(),
1587 &[
1588 DiagnosticEntry {
1589 range: Point::new(2, 9)..Point::new(2, 12),
1590 diagnostic: Diagnostic {
1591 source: Some("disk".into()),
1592 severity: DiagnosticSeverity::WARNING,
1593 message: "unreachable statement".to_string(),
1594 is_disk_based: true,
1595 group_id: 4,
1596 is_primary: true,
1597 ..Default::default()
1598 }
1599 },
1600 DiagnosticEntry {
1601 range: Point::new(2, 9)..Point::new(2, 10),
1602 diagnostic: Diagnostic {
1603 source: Some("disk".into()),
1604 severity: DiagnosticSeverity::ERROR,
1605 message: "undefined variable 'A'".to_string(),
1606 is_disk_based: true,
1607 group_id: 3,
1608 is_primary: true,
1609 ..Default::default()
1610 },
1611 }
1612 ]
1613 );
1614 assert_eq!(
1615 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1616 [
1617 ("fn a() { ".to_string(), None),
1618 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1619 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1620 ("\n".to_string(), None),
1621 ]
1622 );
1623 assert_eq!(
1624 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1625 [
1626 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1627 ("\n".to_string(), None),
1628 ]
1629 );
1630 });
1631
1632 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1633 // changes since the last save.
1634 buffer.update(cx, |buffer, cx| {
1635 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1636 buffer.edit(
1637 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1638 None,
1639 cx,
1640 );
1641 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1642 });
1643 let change_notification_2 = fake_server
1644 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1645 .await;
1646 assert!(
1647 change_notification_2.text_document.version > change_notification_1.text_document.version
1648 );
1649
1650 // Handle out-of-order diagnostics
1651 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1652 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1653 version: Some(change_notification_2.text_document.version),
1654 diagnostics: vec![
1655 lsp::Diagnostic {
1656 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1657 severity: Some(DiagnosticSeverity::ERROR),
1658 message: "undefined variable 'BB'".to_string(),
1659 source: Some("disk".to_string()),
1660 ..Default::default()
1661 },
1662 lsp::Diagnostic {
1663 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1664 severity: Some(DiagnosticSeverity::WARNING),
1665 message: "undefined variable 'A'".to_string(),
1666 source: Some("disk".to_string()),
1667 ..Default::default()
1668 },
1669 ],
1670 });
1671
1672 cx.executor().run_until_parked();
1673 buffer.update(cx, |buffer, _| {
1674 assert_eq!(
1675 buffer
1676 .snapshot()
1677 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1678 .collect::<Vec<_>>(),
1679 &[
1680 DiagnosticEntry {
1681 range: Point::new(2, 21)..Point::new(2, 22),
1682 diagnostic: Diagnostic {
1683 source: Some("disk".into()),
1684 severity: DiagnosticSeverity::WARNING,
1685 message: "undefined variable 'A'".to_string(),
1686 is_disk_based: true,
1687 group_id: 6,
1688 is_primary: true,
1689 ..Default::default()
1690 }
1691 },
1692 DiagnosticEntry {
1693 range: Point::new(3, 9)..Point::new(3, 14),
1694 diagnostic: Diagnostic {
1695 source: Some("disk".into()),
1696 severity: DiagnosticSeverity::ERROR,
1697 message: "undefined variable 'BB'".to_string(),
1698 is_disk_based: true,
1699 group_id: 5,
1700 is_primary: true,
1701 ..Default::default()
1702 },
1703 }
1704 ]
1705 );
1706 });
1707}
1708
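// Checks that empty diagnostic ranges are extended to cover an adjacent character so they remain visible.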
1709#[gpui::test]
1710async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1711 init_test(cx);
1712
1713 let text = concat!(
1714 "let one = ;\n", //
1715 "let two = \n",
1716 "let three = 3;\n",
1717 );
1718
1719 let fs = FakeFs::new(cx.executor());
1720 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1721
1722 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1723 let buffer = project
1724 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1725 .await
1726 .unwrap();
1727
1728 project.update(cx, |project, cx| {
1729 project
1730 .update_buffer_diagnostics(
1731 &buffer,
1732 LanguageServerId(0),
1733 None,
1734 vec![
1735 DiagnosticEntry {
1736 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1737 diagnostic: Diagnostic {
1738 severity: DiagnosticSeverity::ERROR,
1739 message: "syntax error 1".to_string(),
1740 ..Default::default()
1741 },
1742 },
1743 DiagnosticEntry {
1744 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1745 diagnostic: Diagnostic {
1746 severity: DiagnosticSeverity::ERROR,
1747 message: "syntax error 2".to_string(),
1748 ..Default::default()
1749 },
1750 },
1751 ],
1752 cx,
1753 )
1754 .unwrap();
1755 });
1756
1757 // An empty range is extended forward to include the following character.
1758 // At the end of a line, an empty range is extended backward to include
1759 // the preceding character.
1760 buffer.update(cx, |buffer, _| {
1761 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1762 assert_eq!(
1763 chunks
1764 .iter()
1765 .map(|(s, d)| (s.as_str(), *d))
1766 .collect::<Vec<_>>(),
1767 &[
1768 ("let one = ", None),
1769 (";", Some(DiagnosticSeverity::ERROR)),
1770 ("\nlet two =", None),
1771 (" ", Some(DiagnosticSeverity::ERROR)),
1772 ("\nlet three = 3;\n", None)
1773 ]
1774 );
1775 });
1776}
1777
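// Checks that diagnostics reported by different language servers are counted separately in the project's summary.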
1778#[gpui::test]
1779async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1780 init_test(cx);
1781
1782 let fs = FakeFs::new(cx.executor());
1783 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1784 .await;
1785
1786 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1787
1788 project.update(cx, |project, cx| {
1789 project
1790 .update_diagnostic_entries(
1791 LanguageServerId(0),
1792 Path::new("/dir/a.rs").to_owned(),
1793 None,
1794 vec![DiagnosticEntry {
1795 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1796 diagnostic: Diagnostic {
1797 severity: DiagnosticSeverity::ERROR,
1798 is_primary: true,
1799 message: "syntax error a1".to_string(),
1800 ..Default::default()
1801 },
1802 }],
1803 cx,
1804 )
1805 .unwrap();
1806 project
1807 .update_diagnostic_entries(
1808 LanguageServerId(1),
1809 Path::new("/dir/a.rs").to_owned(),
1810 None,
1811 vec![DiagnosticEntry {
1812 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1813 diagnostic: Diagnostic {
1814 severity: DiagnosticSeverity::ERROR,
1815 is_primary: true,
1816 message: "syntax error b1".to_string(),
1817 ..Default::default()
1818 },
1819 }],
1820 cx,
1821 )
1822 .unwrap();
1823
1824 assert_eq!(
1825 project.diagnostic_summary(false, cx),
1826 DiagnosticSummary {
1827 error_count: 2,
1828 warning_count: 0,
1829 }
1830 );
1831 });
1832}
1833
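// Checks that LSP edits computed against an older document version are transformed to apply correctly after later local edits.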
1834#[gpui::test]
1835async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1836 init_test(cx);
1837
1838 let text = "
1839 fn a() {
1840 f1();
1841 }
1842 fn b() {
1843 f2();
1844 }
1845 fn c() {
1846 f3();
1847 }
1848 "
1849 .unindent();
1850
1851 let fs = FakeFs::new(cx.executor());
1852 fs.insert_tree(
1853 "/dir",
1854 json!({
1855 "a.rs": text.clone(),
1856 }),
1857 )
1858 .await;
1859
1860 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1861
1862 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1863 language_registry.add(rust_lang());
1864 let mut fake_servers =
1865 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1866
1867 let buffer = project
1868 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1869 .await
1870 .unwrap();
1871
1872 let mut fake_server = fake_servers.next().await.unwrap();
1873 let lsp_document_version = fake_server
1874 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1875 .await
1876 .text_document
1877 .version;
1878
1879 // Simulate editing the buffer after the language server computes some edits.
1880 buffer.update(cx, |buffer, cx| {
1881 buffer.edit(
1882 [(
1883 Point::new(0, 0)..Point::new(0, 0),
1884 "// above first function\n",
1885 )],
1886 None,
1887 cx,
1888 );
1889 buffer.edit(
1890 [(
1891 Point::new(2, 0)..Point::new(2, 0),
1892 " // inside first function\n",
1893 )],
1894 None,
1895 cx,
1896 );
1897 buffer.edit(
1898 [(
1899 Point::new(6, 4)..Point::new(6, 4),
1900 "// inside second function ",
1901 )],
1902 None,
1903 cx,
1904 );
1905
1906 assert_eq!(
1907 buffer.text(),
1908 "
1909 // above first function
1910 fn a() {
1911 // inside first function
1912 f1();
1913 }
1914 fn b() {
1915 // inside second function f2();
1916 }
1917 fn c() {
1918 f3();
1919 }
1920 "
1921 .unindent()
1922 );
1923 });
1924
1925 let edits = project
1926 .update(cx, |project, cx| {
1927 project.edits_from_lsp(
1928 &buffer,
1929 vec![
1930 // replace body of first function
1931 lsp::TextEdit {
1932 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
1933 new_text: "
1934 fn a() {
1935 f10();
1936 }
1937 "
1938 .unindent(),
1939 },
1940 // edit inside second function
1941 lsp::TextEdit {
1942 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
1943 new_text: "00".into(),
1944 },
1945 // edit inside third function via two distinct edits
1946 lsp::TextEdit {
1947 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
1948 new_text: "4000".into(),
1949 },
1950 lsp::TextEdit {
1951 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
1952 new_text: "".into(),
1953 },
1954 ],
1955 LanguageServerId(0),
1956 Some(lsp_document_version),
1957 cx,
1958 )
1959 })
1960 .await
1961 .unwrap();
1962
1963 buffer.update(cx, |buffer, cx| {
1964 for (range, new_text) in edits {
1965 buffer.edit([(range, new_text)], None, cx);
1966 }
1967 assert_eq!(
1968 buffer.text(),
1969 "
1970 // above first function
1971 fn a() {
1972 // inside first function
1973 f10();
1974 }
1975 fn b() {
1976 // inside second function f200();
1977 }
1978 fn c() {
1979 f4000();
1980 }
1981 "
1982 .unindent()
1983 );
1984 });
1985}
1986
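// Checks that a large LSP diff that amounts to a small change is minimized into the equivalent small edits before being applied.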
1987#[gpui::test]
1988async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
1989 init_test(cx);
1990
1991 let text = "
1992 use a::b;
1993 use a::c;
1994
1995 fn f() {
1996 b();
1997 c();
1998 }
1999 "
2000 .unindent();
2001
2002 let fs = FakeFs::new(cx.executor());
2003 fs.insert_tree(
2004 "/dir",
2005 json!({
2006 "a.rs": text.clone(),
2007 }),
2008 )
2009 .await;
2010
2011 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2012 let buffer = project
2013 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2014 .await
2015 .unwrap();
2016
2017 // Simulate the language server sending us a small edit in the form of a very large diff.
2018 // Rust-analyzer does this when performing a merge-imports code action.
2019 let edits = project
2020 .update(cx, |project, cx| {
2021 project.edits_from_lsp(
2022 &buffer,
2023 [
2024 // Replace the first use statement without editing the semicolon.
2025 lsp::TextEdit {
2026 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2027 new_text: "a::{b, c}".into(),
2028 },
2029 // Reinsert the remainder of the file between the semicolon and the final
2030 // newline of the file.
2031 lsp::TextEdit {
2032 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2033 new_text: "\n\n".into(),
2034 },
2035 lsp::TextEdit {
2036 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2037 new_text: "
2038 fn f() {
2039 b();
2040 c();
2041 }"
2042 .unindent(),
2043 },
2044 // Delete everything after the first newline of the file.
2045 lsp::TextEdit {
2046 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2047 new_text: "".into(),
2048 },
2049 ],
2050 LanguageServerId(0),
2051 None,
2052 cx,
2053 )
2054 })
2055 .await
2056 .unwrap();
2057
2058 buffer.update(cx, |buffer, cx| {
2059 let edits = edits
2060 .into_iter()
2061 .map(|(range, text)| {
2062 (
2063 range.start.to_point(buffer)..range.end.to_point(buffer),
2064 text,
2065 )
2066 })
2067 .collect::<Vec<_>>();
2068
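        // The server's whole-file rewrite is minimized into two edits: the change
        // to the first `use` statement and the deletion of the second one.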
2069 assert_eq!(
2070 edits,
2071 [
2072 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2073 (Point::new(1, 0)..Point::new(2, 0), "".into())
2074 ]
2075 );
2076
2077 for (range, new_text) in edits {
2078 buffer.edit([(range, new_text)], None, cx);
2079 }
2080 assert_eq!(
2081 buffer.text(),
2082 "
2083 use a::{b, c};
2084
2085 fn f() {
2086 b();
2087 c();
2088 }
2089 "
2090 .unindent()
2091 );
2092 });
2093}
2094
2095#[gpui::test]
2096async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2097 init_test(cx);
2098
2099 let text = "
2100 use a::b;
2101 use a::c;
2102
2103 fn f() {
2104 b();
2105 c();
2106 }
2107 "
2108 .unindent();
2109
2110 let fs = FakeFs::new(cx.executor());
2111 fs.insert_tree(
2112 "/dir",
2113 json!({
2114 "a.rs": text.clone(),
2115 }),
2116 )
2117 .await;
2118
2119 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2120 let buffer = project
2121 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2122 .await
2123 .unwrap();
2124
    // Simulate the language server sending us edits out of order, with ranges
    // that are sometimes inverted or that point to invalid locations.
2127 let edits = project
2128 .update(cx, |project, cx| {
2129 project.edits_from_lsp(
2130 &buffer,
2131 [
2132 lsp::TextEdit {
2133 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2134 new_text: "\n\n".into(),
2135 },
2136 lsp::TextEdit {
2137 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2138 new_text: "a::{b, c}".into(),
2139 },
2140 lsp::TextEdit {
2141 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2142 new_text: "".into(),
2143 },
2144 lsp::TextEdit {
2145 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2146 new_text: "
2147 fn f() {
2148 b();
2149 c();
2150 }"
2151 .unindent(),
2152 },
2153 ],
2154 LanguageServerId(0),
2155 None,
2156 cx,
2157 )
2158 })
2159 .await
2160 .unwrap();
2161
2162 buffer.update(cx, |buffer, cx| {
2163 let edits = edits
2164 .into_iter()
2165 .map(|(range, text)| {
2166 (
2167 range.start.to_point(buffer)..range.end.to_point(buffer),
2168 text,
2169 )
2170 })
2171 .collect::<Vec<_>>();
2172
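        // Despite the inverted and out-of-bounds ranges, the edits are normalized
        // into the same two minimal edits as in the previous test.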
2173 assert_eq!(
2174 edits,
2175 [
2176 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2177 (Point::new(1, 0)..Point::new(2, 0), "".into())
2178 ]
2179 );
2180
2181 for (range, new_text) in edits {
2182 buffer.edit([(range, new_text)], None, cx);
2183 }
2184 assert_eq!(
2185 buffer.text(),
2186 "
2187 use a::{b, c};
2188
2189 fn f() {
2190 b();
2191 c();
2192 }
2193 "
2194 .unindent()
2195 );
2196 });
2197}
2198
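/// Collect the buffer's chunks within `range`, merging adjacent chunks that share
/// the same diagnostic severity, for concise assertions in tests.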
2199fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2200 buffer: &Buffer,
2201 range: Range<T>,
2202) -> Vec<(String, Option<DiagnosticSeverity>)> {
2203 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2204 for chunk in buffer.snapshot().chunks(range, true) {
2205 if chunks.last().map_or(false, |prev_chunk| {
2206 prev_chunk.1 == chunk.diagnostic_severity
2207 }) {
2208 chunks.last_mut().unwrap().0.push_str(chunk.text);
2209 } else {
2210 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2211 }
2212 }
2213 chunks
2214}
2215
2216#[gpui::test(iterations = 10)]
2217async fn test_definition(cx: &mut gpui::TestAppContext) {
2218 init_test(cx);
2219
2220 let fs = FakeFs::new(cx.executor());
2221 fs.insert_tree(
2222 "/dir",
2223 json!({
2224 "a.rs": "const fn a() { A }",
2225 "b.rs": "const y: i32 = crate::a()",
2226 }),
2227 )
2228 .await;
2229
2230 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2231
2232 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2233 language_registry.add(rust_lang());
2234 let mut fake_servers =
2235 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
2236
2237 let buffer = project
2238 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2239 .await
2240 .unwrap();
2241
2242 let fake_server = fake_servers.next().await.unwrap();
2243 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2244 let params = params.text_document_position_params;
2245 assert_eq!(
2246 params.text_document.uri.to_file_path().unwrap(),
2247 Path::new("/dir/b.rs"),
2248 );
2249 assert_eq!(params.position, lsp::Position::new(0, 22));
2250
2251 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2252 lsp::Location::new(
2253 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2254 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2255 ),
2256 )))
2257 });
2258
2259 let mut definitions = project
2260 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2261 .await
2262 .unwrap();
2263
    // Assert that no new language server was started.
2265 cx.executor().run_until_parked();
2266 assert!(fake_servers.try_next().is_err());
2267
2268 assert_eq!(definitions.len(), 1);
2269 let definition = definitions.pop().unwrap();
2270 cx.update(|cx| {
2271 let target_buffer = definition.target.buffer.read(cx);
2272 assert_eq!(
2273 target_buffer
2274 .file()
2275 .unwrap()
2276 .as_local()
2277 .unwrap()
2278 .abs_path(cx),
2279 Path::new("/dir/a.rs"),
2280 );
2281 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2282 assert_eq!(
2283 list_worktrees(&project, cx),
2284 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2285 );
2286
2287 drop(definition);
2288 });
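    // Dropping the definition releases the last reference to the invisible worktree
    // that was created for the target buffer, so it is removed from the project.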
2289 cx.update(|cx| {
2290 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2291 });
2292
2293 fn list_worktrees<'a>(
2294 project: &'a Model<Project>,
2295 cx: &'a AppContext,
2296 ) -> Vec<(&'a Path, bool)> {
2297 project
2298 .read(cx)
2299 .worktrees()
2300 .map(|worktree| {
2301 let worktree = worktree.read(cx);
2302 (
2303 worktree.as_local().unwrap().abs_path().as_ref(),
2304 worktree.is_visible(),
2305 )
2306 })
2307 .collect::<Vec<_>>()
2308 }
2309}
2310
2311#[gpui::test]
2312async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2313 init_test(cx);
2314
2315 let fs = FakeFs::new(cx.executor());
2316 fs.insert_tree(
2317 "/dir",
2318 json!({
2319 "a.ts": "",
2320 }),
2321 )
2322 .await;
2323
2324 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2325
2326 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2327 language_registry.add(typescript_lang());
2328 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2329 "TypeScript",
2330 FakeLspAdapter {
2331 capabilities: lsp::ServerCapabilities {
2332 completion_provider: Some(lsp::CompletionOptions {
2333 trigger_characters: Some(vec![":".to_string()]),
2334 ..Default::default()
2335 }),
2336 ..Default::default()
2337 },
2338 ..Default::default()
2339 },
2340 );
2341
2342 let buffer = project
2343 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2344 .await
2345 .unwrap();
2346
2347 let fake_server = fake_language_servers.next().await.unwrap();
2348
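    // Request completions at the end of "b.fqn". The server's completion item has
    // no edit range, so the replaced range falls back to the word under the cursor.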
2349 let text = "let a = b.fqn";
2350 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2351 let completions = project.update(cx, |project, cx| {
2352 project.completions(&buffer, text.len(), cx)
2353 });
2354
2355 fake_server
2356 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2357 Ok(Some(lsp::CompletionResponse::Array(vec![
2358 lsp::CompletionItem {
2359 label: "fullyQualifiedName?".into(),
2360 insert_text: Some("fullyQualifiedName".into()),
2361 ..Default::default()
2362 },
2363 ])))
2364 })
2365 .next()
2366 .await;
2367 let completions = completions.await.unwrap();
2368 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2369 assert_eq!(completions.len(), 1);
2370 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2371 assert_eq!(
2372 completions[0].old_range.to_offset(&snapshot),
2373 text.len() - 3..text.len()
2374 );
2375
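    // Request completions just before the closing quote of a string literal. The
    // fallback range should cover only the partial word before the cursor ("cmp").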
2376 let text = "let a = \"atoms/cmp\"";
2377 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2378 let completions = project.update(cx, |project, cx| {
2379 project.completions(&buffer, text.len() - 1, cx)
2380 });
2381
2382 fake_server
2383 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2384 Ok(Some(lsp::CompletionResponse::Array(vec![
2385 lsp::CompletionItem {
2386 label: "component".into(),
2387 ..Default::default()
2388 },
2389 ])))
2390 })
2391 .next()
2392 .await;
2393 let completions = completions.await.unwrap();
2394 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2395 assert_eq!(completions.len(), 1);
2396 assert_eq!(completions[0].new_text, "component");
2397 assert_eq!(
2398 completions[0].old_range.to_offset(&snapshot),
2399 text.len() - 4..text.len() - 1
2400 );
2401}
2402
2403#[gpui::test]
2404async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2405 init_test(cx);
2406
2407 let fs = FakeFs::new(cx.executor());
2408 fs.insert_tree(
2409 "/dir",
2410 json!({
2411 "a.ts": "",
2412 }),
2413 )
2414 .await;
2415
2416 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2417
2418 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2419 language_registry.add(typescript_lang());
2420 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2421 "TypeScript",
2422 FakeLspAdapter {
2423 capabilities: lsp::ServerCapabilities {
2424 completion_provider: Some(lsp::CompletionOptions {
2425 trigger_characters: Some(vec![":".to_string()]),
2426 ..Default::default()
2427 }),
2428 ..Default::default()
2429 },
2430 ..Default::default()
2431 },
2432 );
2433
2434 let buffer = project
2435 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2436 .await
2437 .unwrap();
2438
2439 let fake_server = fake_language_servers.next().await.unwrap();
2440
2441 let text = "let a = b.fqn";
2442 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2443 let completions = project.update(cx, |project, cx| {
2444 project.completions(&buffer, text.len(), cx)
2445 });
2446
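    // The server's insert text contains stray carriage returns; they should be
    // normalized to "\n" in the resulting completion.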
2447 fake_server
2448 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2449 Ok(Some(lsp::CompletionResponse::Array(vec![
2450 lsp::CompletionItem {
2451 label: "fullyQualifiedName?".into(),
2452 insert_text: Some("fully\rQualified\r\nName".into()),
2453 ..Default::default()
2454 },
2455 ])))
2456 })
2457 .next()
2458 .await;
2459 let completions = completions.await.unwrap();
2460 assert_eq!(completions.len(), 1);
2461 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2462}
2463
2464#[gpui::test(iterations = 10)]
2465async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2466 init_test(cx);
2467
2468 let fs = FakeFs::new(cx.executor());
2469 fs.insert_tree(
2470 "/dir",
2471 json!({
2472 "a.ts": "a",
2473 }),
2474 )
2475 .await;
2476
2477 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2478
2479 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2480 language_registry.add(typescript_lang());
2481 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2482 "TypeScript",
2483 FakeLspAdapter {
2484 capabilities: lsp::ServerCapabilities {
2485 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
2486 lsp::CodeActionOptions {
2487 resolve_provider: Some(true),
2488 ..lsp::CodeActionOptions::default()
2489 },
2490 )),
2491 ..lsp::ServerCapabilities::default()
2492 },
2493 ..FakeLspAdapter::default()
2494 },
2495 );
2496
2497 let buffer = project
2498 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2499 .await
2500 .unwrap();
2501
2502 let fake_server = fake_language_servers.next().await.unwrap();
2503
    // The language server returns code actions that contain commands rather than edits.
2505 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2506 fake_server
2507 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2508 Ok(Some(vec![
2509 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2510 title: "The code action".into(),
2511 data: Some(serde_json::json!({
2512 "command": "_the/command",
2513 })),
2514 ..lsp::CodeAction::default()
2515 }),
2516 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2517 title: "two".into(),
2518 ..lsp::CodeAction::default()
2519 }),
2520 ]))
2521 })
2522 .next()
2523 .await;
2524
2525 let action = actions.await[0].clone();
2526 let apply = project.update(cx, |project, cx| {
2527 project.apply_code_action(buffer.clone(), action, true, cx)
2528 });
2529
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2532 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2533 |mut action, _| async move {
2534 if action.data.is_some() {
2535 action.command = Some(lsp::Command {
2536 title: "The command".into(),
2537 command: "_the/command".into(),
2538 arguments: Some(vec![json!("the-argument")]),
2539 });
2540 }
2541 Ok(action)
2542 },
2543 );
2544
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2547 fake_server
2548 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2549 let fake = fake_server.clone();
2550 move |params, _| {
2551 assert_eq!(params.command, "_the/command");
2552 let fake = fake.clone();
2553 async move {
2554 fake.server
2555 .request::<lsp::request::ApplyWorkspaceEdit>(
2556 lsp::ApplyWorkspaceEditParams {
2557 label: None,
2558 edit: lsp::WorkspaceEdit {
2559 changes: Some(
2560 [(
2561 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2562 vec![lsp::TextEdit {
2563 range: lsp::Range::new(
2564 lsp::Position::new(0, 0),
2565 lsp::Position::new(0, 0),
2566 ),
2567 new_text: "X".into(),
2568 }],
2569 )]
2570 .into_iter()
2571 .collect(),
2572 ),
2573 ..Default::default()
2574 },
2575 },
2576 )
2577 .await
2578 .unwrap();
2579 Ok(Some(json!(null)))
2580 }
2581 }
2582 })
2583 .next()
2584 .await;
2585
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2588 let transaction = apply.await.unwrap();
2589 assert!(transaction.0.contains_key(&buffer));
2590 buffer.update(cx, |buffer, cx| {
2591 assert_eq!(buffer.text(), "Xa");
2592 buffer.undo(cx);
2593 assert_eq!(buffer.text(), "a");
2594 });
2595}
2596
2597#[gpui::test(iterations = 10)]
2598async fn test_save_file(cx: &mut gpui::TestAppContext) {
2599 init_test(cx);
2600
2601 let fs = FakeFs::new(cx.executor());
2602 fs.insert_tree(
2603 "/dir",
2604 json!({
2605 "file1": "the old contents",
2606 }),
2607 )
2608 .await;
2609
2610 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2611 let buffer = project
2612 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2613 .await
2614 .unwrap();
2615 buffer.update(cx, |buffer, cx| {
2616 assert_eq!(buffer.text(), "the old contents");
2617 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2618 });
2619
2620 project
2621 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2622 .await
2623 .unwrap();
2624
2625 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2626 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2627}
2628
2629#[gpui::test(iterations = 30)]
2630async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2631 init_test(cx);
2632
2633 let fs = FakeFs::new(cx.executor().clone());
2634 fs.insert_tree(
2635 "/dir",
2636 json!({
2637 "file1": "the original contents",
2638 }),
2639 )
2640 .await;
2641
2642 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2643 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2644 let buffer = project
2645 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2646 .await
2647 .unwrap();
2648
2649 // Simulate buffer diffs being slow, so that they don't complete before
2650 // the next file change occurs.
2651 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2652
2653 // Change the buffer's file on disk, and then wait for the file change
2654 // to be detected by the worktree, so that the buffer starts reloading.
2655 fs.save(
2656 "/dir/file1".as_ref(),
2657 &"the first contents".into(),
2658 Default::default(),
2659 )
2660 .await
2661 .unwrap();
2662 worktree.next_event(cx);
2663
2664 // Change the buffer's file again. Depending on the random seed, the
2665 // previous file change may still be in progress.
2666 fs.save(
2667 "/dir/file1".as_ref(),
2668 &"the second contents".into(),
2669 Default::default(),
2670 )
2671 .await
2672 .unwrap();
2673 worktree.next_event(cx);
2674
2675 cx.executor().run_until_parked();
2676 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2677 buffer.read_with(cx, |buffer, _| {
2678 assert_eq!(buffer.text(), on_disk_text);
2679 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2681 });
2682}
2683
2684#[gpui::test(iterations = 30)]
2685async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2686 init_test(cx);
2687
2688 let fs = FakeFs::new(cx.executor().clone());
2689 fs.insert_tree(
2690 "/dir",
2691 json!({
2692 "file1": "the original contents",
2693 }),
2694 )
2695 .await;
2696
2697 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2698 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2699 let buffer = project
2700 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2701 .await
2702 .unwrap();
2703
2704 // Simulate buffer diffs being slow, so that they don't complete before
2705 // the next file change occurs.
2706 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2707
2708 // Change the buffer's file on disk, and then wait for the file change
2709 // to be detected by the worktree, so that the buffer starts reloading.
2710 fs.save(
2711 "/dir/file1".as_ref(),
2712 &"the first contents".into(),
2713 Default::default(),
2714 )
2715 .await
2716 .unwrap();
2717 worktree.next_event(cx);
2718
2719 cx.executor()
2720 .spawn(cx.executor().simulate_random_delay())
2721 .await;
2722
    // Perform a no-op edit, causing the buffer's version to increase.
2724 buffer.update(cx, |buffer, cx| {
2725 buffer.edit([(0..0, " ")], None, cx);
2726 buffer.undo(cx);
2727 });
2728
2729 cx.executor().run_until_parked();
2730 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2731 buffer.read_with(cx, |buffer, _| {
2732 let buffer_text = buffer.text();
2733 if buffer_text == on_disk_text {
2734 assert!(
2735 !buffer.is_dirty() && !buffer.has_conflict(),
2736 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2737 );
2738 }
2739 // If the file change occurred while the buffer was processing the first
2740 // change, the buffer will be in a conflicting state.
2741 else {
2742 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2744 }
2745 });
2746}
2747
2748#[gpui::test]
2749async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2750 init_test(cx);
2751
2752 let fs = FakeFs::new(cx.executor());
2753 fs.insert_tree(
2754 "/dir",
2755 json!({
2756 "file1": "the old contents",
2757 }),
2758 )
2759 .await;
2760
2761 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2762 let buffer = project
2763 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2764 .await
2765 .unwrap();
2766 buffer.update(cx, |buffer, cx| {
2767 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2768 });
2769
2770 project
2771 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2772 .await
2773 .unwrap();
2774
2775 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2776 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2777}
2778
2779#[gpui::test]
2780async fn test_save_as(cx: &mut gpui::TestAppContext) {
2781 init_test(cx);
2782
2783 let fs = FakeFs::new(cx.executor());
2784 fs.insert_tree("/dir", json!({})).await;
2785
2786 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2787
2788 let languages = project.update(cx, |project, _| project.languages().clone());
2789 languages.add(rust_lang());
2790
2791 let buffer = project.update(cx, |project, cx| {
2792 project.create_buffer("", None, cx).unwrap()
2793 });
2794 buffer.update(cx, |buffer, cx| {
2795 buffer.edit([(0..0, "abc")], None, cx);
2796 assert!(buffer.is_dirty());
2797 assert!(!buffer.has_conflict());
2798 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2799 });
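    // Save the untitled buffer to a path with a Rust extension; the buffer should
    // pick up the new file and switch to the Rust language.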
2800 project
2801 .update(cx, |project, cx| {
2802 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2803 })
2804 .await
2805 .unwrap();
2806 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2807
2808 cx.executor().run_until_parked();
2809 buffer.update(cx, |buffer, cx| {
2810 assert_eq!(
2811 buffer.file().unwrap().full_path(cx),
2812 Path::new("dir/file1.rs")
2813 );
2814 assert!(!buffer.is_dirty());
2815 assert!(!buffer.has_conflict());
2816 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2817 });
2818
2819 let opened_buffer = project
2820 .update(cx, |project, cx| {
2821 project.open_local_buffer("/dir/file1.rs", cx)
2822 })
2823 .await
2824 .unwrap();
2825 assert_eq!(opened_buffer, buffer);
2826}
2827
2828#[gpui::test(retries = 5)]
2829async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2830 init_test(cx);
2831 cx.executor().allow_parking();
2832
2833 let dir = temp_tree(json!({
2834 "a": {
2835 "file1": "",
2836 "file2": "",
2837 "file3": "",
2838 },
2839 "b": {
2840 "c": {
2841 "file4": "",
2842 "file5": "",
2843 }
2844 }
2845 }));
2846
2847 let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
2848 let rpc = project.update(cx, |p, _| p.client.clone());
2849
2850 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2851 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2852 async move { buffer.await.unwrap() }
2853 };
2854 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2855 project.update(cx, |project, cx| {
2856 let tree = project.worktrees().next().unwrap();
2857 tree.read(cx)
2858 .entry_for_path(path)
2859 .unwrap_or_else(|| panic!("no entry for path {}", path))
2860 .id
2861 })
2862 };
2863
2864 let buffer2 = buffer_for_path("a/file2", cx).await;
2865 let buffer3 = buffer_for_path("a/file3", cx).await;
2866 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2867 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2868
2869 let file2_id = id_for_path("a/file2", cx);
2870 let file3_id = id_for_path("a/file3", cx);
2871 let file4_id = id_for_path("b/c/file4", cx);
2872
2873 // Create a remote copy of this worktree.
2874 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2875
2876 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
2877
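    // Record every update streamed from the local worktree so it can be replayed
    // into the remote worktree below.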
2878 let updates = Arc::new(Mutex::new(Vec::new()));
2879 tree.update(cx, |tree, cx| {
2880 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
2881 let updates = updates.clone();
2882 move |update| {
2883 updates.lock().push(update);
2884 async { true }
2885 }
2886 });
2887 });
2888
2889 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
2890
2891 cx.executor().run_until_parked();
2892
2893 cx.update(|cx| {
2894 assert!(!buffer2.read(cx).is_dirty());
2895 assert!(!buffer3.read(cx).is_dirty());
2896 assert!(!buffer4.read(cx).is_dirty());
2897 assert!(!buffer5.read(cx).is_dirty());
2898 });
2899
2900 // Rename and delete files and directories.
2901 tree.flush_fs_events(cx).await;
2902 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
2903 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
2904 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
2905 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
2906 tree.flush_fs_events(cx).await;
2907
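    // After the renames and deletion, these are the paths the worktree should
    // contain. The original "b" directory remains, now empty.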
2908 let expected_paths = vec![
2909 "a",
2910 "a/file1",
2911 "a/file2.new",
2912 "b",
2913 "d",
2914 "d/file3",
2915 "d/file4",
2916 ];
2917
2918 cx.update(|app| {
2919 assert_eq!(
2920 tree.read(app)
2921 .paths()
2922 .map(|p| p.to_str().unwrap())
2923 .collect::<Vec<_>>(),
2924 expected_paths
2925 );
2926 });
2927
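    // Entry ids are preserved when files are renamed or when their parent
    // directories are moved.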
2928 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
2929 assert_eq!(id_for_path("d/file3", cx), file3_id);
2930 assert_eq!(id_for_path("d/file4", cx), file4_id);
2931
2932 cx.update(|cx| {
2933 assert_eq!(
2934 buffer2.read(cx).file().unwrap().path().as_ref(),
2935 Path::new("a/file2.new")
2936 );
2937 assert_eq!(
2938 buffer3.read(cx).file().unwrap().path().as_ref(),
2939 Path::new("d/file3")
2940 );
2941 assert_eq!(
2942 buffer4.read(cx).file().unwrap().path().as_ref(),
2943 Path::new("d/file4")
2944 );
2945 assert_eq!(
2946 buffer5.read(cx).file().unwrap().path().as_ref(),
2947 Path::new("b/c/file5")
2948 );
2949
2950 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
2951 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
2952 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
2953 assert!(buffer5.read(cx).file().unwrap().is_deleted());
2954 });
2955
2956 // Update the remote worktree. Check that it becomes consistent with the
2957 // local worktree.
2958 cx.executor().run_until_parked();
2959
2960 remote.update(cx, |remote, _| {
2961 for update in updates.lock().drain(..) {
2962 remote.as_remote_mut().unwrap().update_from_remote(update);
2963 }
2964 });
2965 cx.executor().run_until_parked();
2966 remote.update(cx, |remote, _| {
2967 assert_eq!(
2968 remote
2969 .paths()
2970 .map(|p| p.to_str().unwrap())
2971 .collect::<Vec<_>>(),
2972 expected_paths
2973 );
2974 });
2975}
2976
2977#[gpui::test(iterations = 10)]
2978async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
2979 init_test(cx);
2980
2981 let fs = FakeFs::new(cx.executor());
2982 fs.insert_tree(
2983 "/dir",
2984 json!({
2985 "a": {
2986 "file1": "",
2987 }
2988 }),
2989 )
2990 .await;
2991
2992 let project = Project::test(fs, [Path::new("/dir")], cx).await;
2993 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
2994 let tree_id = tree.update(cx, |tree, _| tree.id());
2995
2996 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2997 project.update(cx, |project, cx| {
2998 let tree = project.worktrees().next().unwrap();
2999 tree.read(cx)
3000 .entry_for_path(path)
3001 .unwrap_or_else(|| panic!("no entry for path {}", path))
3002 .id
3003 })
3004 };
3005
3006 let dir_id = id_for_path("a", cx);
3007 let file_id = id_for_path("a/file1", cx);
3008 let buffer = project
3009 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3010 .await
3011 .unwrap();
3012 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3013
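    // Rename the parent directory; both entry ids and the open buffer should
    // survive the rename.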
3014 project
3015 .update(cx, |project, cx| {
3016 project.rename_entry(dir_id, Path::new("b"), cx)
3017 })
3018 .unwrap()
3019 .await
3020 .unwrap();
3021 cx.executor().run_until_parked();
3022
3023 assert_eq!(id_for_path("b", cx), dir_id);
3024 assert_eq!(id_for_path("b/file1", cx), file_id);
3025 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3026}
3027
3028#[gpui::test]
3029async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3030 init_test(cx);
3031
3032 let fs = FakeFs::new(cx.executor());
3033 fs.insert_tree(
3034 "/dir",
3035 json!({
3036 "a.txt": "a-contents",
3037 "b.txt": "b-contents",
3038 }),
3039 )
3040 .await;
3041
3042 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3043
3044 // Spawn multiple tasks to open paths, repeating some paths.
3045 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3046 (
3047 p.open_local_buffer("/dir/a.txt", cx),
3048 p.open_local_buffer("/dir/b.txt", cx),
3049 p.open_local_buffer("/dir/a.txt", cx),
3050 )
3051 });
3052
3053 let buffer_a_1 = buffer_a_1.await.unwrap();
3054 let buffer_a_2 = buffer_a_2.await.unwrap();
3055 let buffer_b = buffer_b.await.unwrap();
3056 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3057 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3058
3059 // There is only one buffer per path.
3060 let buffer_a_id = buffer_a_1.entity_id();
3061 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3062
3063 // Open the same path again while it is still open.
3064 drop(buffer_a_1);
3065 let buffer_a_3 = project
3066 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3067 .await
3068 .unwrap();
3069
3070 // There's still only one buffer per path.
3071 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3072}
3073
3074#[gpui::test]
3075async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3076 init_test(cx);
3077
3078 let fs = FakeFs::new(cx.executor());
3079 fs.insert_tree(
3080 "/dir",
3081 json!({
3082 "file1": "abc",
3083 "file2": "def",
3084 "file3": "ghi",
3085 }),
3086 )
3087 .await;
3088
3089 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3090
3091 let buffer1 = project
3092 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3093 .await
3094 .unwrap();
3095 let events = Arc::new(Mutex::new(Vec::new()));
3096
3097 // initially, the buffer isn't dirty.
3098 buffer1.update(cx, |buffer, cx| {
3099 cx.subscribe(&buffer1, {
3100 let events = events.clone();
3101 move |_, _, event, _| match event {
3102 BufferEvent::Operation(_) => {}
3103 _ => events.lock().push(event.clone()),
3104 }
3105 })
3106 .detach();
3107
3108 assert!(!buffer.is_dirty());
3109 assert!(events.lock().is_empty());
3110
3111 buffer.edit([(1..2, "")], None, cx);
3112 });
3113
3114 // after the first edit, the buffer is dirty, and emits a dirtied event.
3115 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
3117 assert!(buffer.is_dirty());
3118 assert_eq!(
3119 *events.lock(),
3120 &[language::Event::Edited, language::Event::DirtyChanged]
3121 );
3122 events.lock().clear();
3123 buffer.did_save(
3124 buffer.version(),
3125 buffer.as_rope().fingerprint(),
3126 buffer.file().unwrap().mtime(),
3127 cx,
3128 );
3129 });
3130
3131 // after saving, the buffer is not dirty, and emits a saved event.
3132 buffer1.update(cx, |buffer, cx| {
3133 assert!(!buffer.is_dirty());
3134 assert_eq!(*events.lock(), &[language::Event::Saved]);
3135 events.lock().clear();
3136
3137 buffer.edit([(1..1, "B")], None, cx);
3138 buffer.edit([(2..2, "D")], None, cx);
3139 });
3140
3141 // after editing again, the buffer is dirty, and emits another dirty event.
3142 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
3144 assert!(buffer.is_dirty());
3145 assert_eq!(
3146 *events.lock(),
3147 &[
3148 language::Event::Edited,
3149 language::Event::DirtyChanged,
3150 language::Event::Edited,
3151 ],
3152 );
3153 events.lock().clear();
3154
3155 // After restoring the buffer to its previously-saved state,
3156 // the buffer is not considered dirty anymore.
3157 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
3159 assert!(!buffer.is_dirty());
3160 });
3161
3162 assert_eq!(
3163 *events.lock(),
3164 &[language::Event::Edited, language::Event::DirtyChanged]
3165 );
3166
3167 // When a file is deleted, the buffer is considered dirty.
3168 let events = Arc::new(Mutex::new(Vec::new()));
3169 let buffer2 = project
3170 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3171 .await
3172 .unwrap();
3173 buffer2.update(cx, |_, cx| {
3174 cx.subscribe(&buffer2, {
3175 let events = events.clone();
3176 move |_, _, event, _| events.lock().push(event.clone())
3177 })
3178 .detach();
3179 });
3180
3181 fs.remove_file("/dir/file2".as_ref(), Default::default())
3182 .await
3183 .unwrap();
3184 cx.executor().run_until_parked();
3185 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3186 assert_eq!(
3187 *events.lock(),
3188 &[
3189 language::Event::DirtyChanged,
3190 language::Event::FileHandleChanged
3191 ]
3192 );
3193
    // When a file is deleted while its buffer is already dirty, we don't emit
    // an additional DirtyChanged event.
3195 let events = Arc::new(Mutex::new(Vec::new()));
3196 let buffer3 = project
3197 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3198 .await
3199 .unwrap();
3200 buffer3.update(cx, |_, cx| {
3201 cx.subscribe(&buffer3, {
3202 let events = events.clone();
3203 move |_, _, event, _| events.lock().push(event.clone())
3204 })
3205 .detach();
3206 });
3207
3208 buffer3.update(cx, |buffer, cx| {
3209 buffer.edit([(0..0, "x")], None, cx);
3210 });
3211 events.lock().clear();
3212 fs.remove_file("/dir/file3".as_ref(), Default::default())
3213 .await
3214 .unwrap();
3215 cx.executor().run_until_parked();
3216 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3217 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3218}
3219
3220#[gpui::test]
3221async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3222 init_test(cx);
3223
3224 let initial_contents = "aaa\nbbbbb\nc\n";
3225 let fs = FakeFs::new(cx.executor());
3226 fs.insert_tree(
3227 "/dir",
3228 json!({
3229 "the-file": initial_contents,
3230 }),
3231 )
3232 .await;
3233 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3234 let buffer = project
3235 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3236 .await
3237 .unwrap();
3238
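    // Create an anchor at column 1 of each of the first three lines so we can
    // verify how anchors are relocated when the file is reloaded from disk.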
3239 let anchors = (0..3)
3240 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3241 .collect::<Vec<_>>();
3242
3243 // Change the file on disk, adding two new lines of text, and removing
3244 // one line.
3245 buffer.update(cx, |buffer, _| {
3246 assert!(!buffer.is_dirty());
3247 assert!(!buffer.has_conflict());
3248 });
3249 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3250 fs.save(
3251 "/dir/the-file".as_ref(),
3252 &new_contents.into(),
3253 LineEnding::Unix,
3254 )
3255 .await
3256 .unwrap();
3257
3258 // Because the buffer was not modified, it is reloaded from disk. Its
3259 // contents are edited according to the diff between the old and new
3260 // file contents.
3261 cx.executor().run_until_parked();
3262 buffer.update(cx, |buffer, _| {
3263 assert_eq!(buffer.text(), new_contents);
3264 assert!(!buffer.is_dirty());
3265 assert!(!buffer.has_conflict());
3266
3267 let anchor_positions = anchors
3268 .iter()
3269 .map(|anchor| anchor.to_point(&*buffer))
3270 .collect::<Vec<_>>();
3271 assert_eq!(
3272 anchor_positions,
3273 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3274 );
3275 });
3276
3277 // Modify the buffer
3278 buffer.update(cx, |buffer, cx| {
3279 buffer.edit([(0..0, " ")], None, cx);
3280 assert!(buffer.is_dirty());
3281 assert!(!buffer.has_conflict());
3282 });
3283
3284 // Change the file on disk again, adding blank lines to the beginning.
3285 fs.save(
3286 "/dir/the-file".as_ref(),
3287 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3288 LineEnding::Unix,
3289 )
3290 .await
3291 .unwrap();
3292
3293 // Because the buffer is modified, it doesn't reload from disk, but is
3294 // marked as having a conflict.
3295 cx.executor().run_until_parked();
3296 buffer.update(cx, |buffer, _| {
3297 assert!(buffer.has_conflict());
3298 });
3299}
3300
3301#[gpui::test]
3302async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3303 init_test(cx);
3304
3305 let fs = FakeFs::new(cx.executor());
3306 fs.insert_tree(
3307 "/dir",
3308 json!({
3309 "file1": "a\nb\nc\n",
3310 "file2": "one\r\ntwo\r\nthree\r\n",
3311 }),
3312 )
3313 .await;
3314
3315 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3316 let buffer1 = project
3317 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3318 .await
3319 .unwrap();
3320 let buffer2 = project
3321 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3322 .await
3323 .unwrap();
3324
3325 buffer1.update(cx, |buffer, _| {
3326 assert_eq!(buffer.text(), "a\nb\nc\n");
3327 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3328 });
3329 buffer2.update(cx, |buffer, _| {
3330 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3331 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3332 });
3333
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3336 fs.save(
3337 "/dir/file1".as_ref(),
3338 &"aaa\nb\nc\n".into(),
3339 LineEnding::Windows,
3340 )
3341 .await
3342 .unwrap();
3343 cx.executor().run_until_parked();
3344 buffer1.update(cx, |buffer, _| {
3345 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3346 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3347 });
3348
    // Save a file with Windows line endings. The file is written correctly.
3350 buffer2.update(cx, |buffer, cx| {
3351 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3352 });
3353 project
3354 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3355 .await
3356 .unwrap();
3357 assert_eq!(
3358 fs.load("/dir/file2".as_ref()).await.unwrap(),
3359 "one\r\ntwo\r\nthree\r\nfour\r\n",
3360 );
3361}
3362
3363#[gpui::test]
3364async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3365 init_test(cx);
3366
3367 let fs = FakeFs::new(cx.executor());
3368 fs.insert_tree(
3369 "/the-dir",
3370 json!({
3371 "a.rs": "
3372 fn foo(mut v: Vec<usize>) {
3373 for x in &v {
3374 v.push(1);
3375 }
3376 }
3377 "
3378 .unindent(),
3379 }),
3380 )
3381 .await;
3382
3383 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3384 let buffer = project
3385 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3386 .await
3387 .unwrap();
3388
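    // Publish several diagnostics that reference one another through their related
    // information; they should be grouped into two diagnostic groups.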
3389 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3390 let message = lsp::PublishDiagnosticsParams {
3391 uri: buffer_uri.clone(),
3392 diagnostics: vec![
3393 lsp::Diagnostic {
3394 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3395 severity: Some(DiagnosticSeverity::WARNING),
3396 message: "error 1".to_string(),
3397 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3398 location: lsp::Location {
3399 uri: buffer_uri.clone(),
3400 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3401 },
3402 message: "error 1 hint 1".to_string(),
3403 }]),
3404 ..Default::default()
3405 },
3406 lsp::Diagnostic {
3407 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3408 severity: Some(DiagnosticSeverity::HINT),
3409 message: "error 1 hint 1".to_string(),
3410 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3411 location: lsp::Location {
3412 uri: buffer_uri.clone(),
3413 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3414 },
3415 message: "original diagnostic".to_string(),
3416 }]),
3417 ..Default::default()
3418 },
3419 lsp::Diagnostic {
3420 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3421 severity: Some(DiagnosticSeverity::ERROR),
3422 message: "error 2".to_string(),
3423 related_information: Some(vec![
3424 lsp::DiagnosticRelatedInformation {
3425 location: lsp::Location {
3426 uri: buffer_uri.clone(),
3427 range: lsp::Range::new(
3428 lsp::Position::new(1, 13),
3429 lsp::Position::new(1, 15),
3430 ),
3431 },
3432 message: "error 2 hint 1".to_string(),
3433 },
3434 lsp::DiagnosticRelatedInformation {
3435 location: lsp::Location {
3436 uri: buffer_uri.clone(),
3437 range: lsp::Range::new(
3438 lsp::Position::new(1, 13),
3439 lsp::Position::new(1, 15),
3440 ),
3441 },
3442 message: "error 2 hint 2".to_string(),
3443 },
3444 ]),
3445 ..Default::default()
3446 },
3447 lsp::Diagnostic {
3448 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3449 severity: Some(DiagnosticSeverity::HINT),
3450 message: "error 2 hint 1".to_string(),
3451 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3452 location: lsp::Location {
3453 uri: buffer_uri.clone(),
3454 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3455 },
3456 message: "original diagnostic".to_string(),
3457 }]),
3458 ..Default::default()
3459 },
3460 lsp::Diagnostic {
3461 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3462 severity: Some(DiagnosticSeverity::HINT),
3463 message: "error 2 hint 2".to_string(),
3464 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3465 location: lsp::Location {
3466 uri: buffer_uri,
3467 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3468 },
3469 message: "original diagnostic".to_string(),
3470 }]),
3471 ..Default::default()
3472 },
3473 ],
3474 version: None,
3475 };
3476
3477 project
3478 .update(cx, |p, cx| {
3479 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3480 })
3481 .unwrap();
3482 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3483
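    // Diagnostics are returned in buffer order. Each diagnostic's related
    // information becomes hint-severity entries in the same group as its primary.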
3484 assert_eq!(
3485 buffer
3486 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3487 .collect::<Vec<_>>(),
3488 &[
3489 DiagnosticEntry {
3490 range: Point::new(1, 8)..Point::new(1, 9),
3491 diagnostic: Diagnostic {
3492 severity: DiagnosticSeverity::WARNING,
3493 message: "error 1".to_string(),
3494 group_id: 1,
3495 is_primary: true,
3496 ..Default::default()
3497 }
3498 },
3499 DiagnosticEntry {
3500 range: Point::new(1, 8)..Point::new(1, 9),
3501 diagnostic: Diagnostic {
3502 severity: DiagnosticSeverity::HINT,
3503 message: "error 1 hint 1".to_string(),
3504 group_id: 1,
3505 is_primary: false,
3506 ..Default::default()
3507 }
3508 },
3509 DiagnosticEntry {
3510 range: Point::new(1, 13)..Point::new(1, 15),
3511 diagnostic: Diagnostic {
3512 severity: DiagnosticSeverity::HINT,
3513 message: "error 2 hint 1".to_string(),
3514 group_id: 0,
3515 is_primary: false,
3516 ..Default::default()
3517 }
3518 },
3519 DiagnosticEntry {
3520 range: Point::new(1, 13)..Point::new(1, 15),
3521 diagnostic: Diagnostic {
3522 severity: DiagnosticSeverity::HINT,
3523 message: "error 2 hint 2".to_string(),
3524 group_id: 0,
3525 is_primary: false,
3526 ..Default::default()
3527 }
3528 },
3529 DiagnosticEntry {
3530 range: Point::new(2, 8)..Point::new(2, 17),
3531 diagnostic: Diagnostic {
3532 severity: DiagnosticSeverity::ERROR,
3533 message: "error 2".to_string(),
3534 group_id: 0,
3535 is_primary: true,
3536 ..Default::default()
3537 }
3538 }
3539 ]
3540 );
3541
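    // Group 0 consists of the "error 2" primary diagnostic and its two hints.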
3542 assert_eq!(
3543 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3544 &[
3545 DiagnosticEntry {
3546 range: Point::new(1, 13)..Point::new(1, 15),
3547 diagnostic: Diagnostic {
3548 severity: DiagnosticSeverity::HINT,
3549 message: "error 2 hint 1".to_string(),
3550 group_id: 0,
3551 is_primary: false,
3552 ..Default::default()
3553 }
3554 },
3555 DiagnosticEntry {
3556 range: Point::new(1, 13)..Point::new(1, 15),
3557 diagnostic: Diagnostic {
3558 severity: DiagnosticSeverity::HINT,
3559 message: "error 2 hint 2".to_string(),
3560 group_id: 0,
3561 is_primary: false,
3562 ..Default::default()
3563 }
3564 },
3565 DiagnosticEntry {
3566 range: Point::new(2, 8)..Point::new(2, 17),
3567 diagnostic: Diagnostic {
3568 severity: DiagnosticSeverity::ERROR,
3569 message: "error 2".to_string(),
3570 group_id: 0,
3571 is_primary: true,
3572 ..Default::default()
3573 }
3574 }
3575 ]
3576 );
3577
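    // Group 1 consists of the "error 1" primary warning and its single hint.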
3578 assert_eq!(
3579 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3580 &[
3581 DiagnosticEntry {
3582 range: Point::new(1, 8)..Point::new(1, 9),
3583 diagnostic: Diagnostic {
3584 severity: DiagnosticSeverity::WARNING,
3585 message: "error 1".to_string(),
3586 group_id: 1,
3587 is_primary: true,
3588 ..Default::default()
3589 }
3590 },
3591 DiagnosticEntry {
3592 range: Point::new(1, 8)..Point::new(1, 9),
3593 diagnostic: Diagnostic {
3594 severity: DiagnosticSeverity::HINT,
3595 message: "error 1 hint 1".to_string(),
3596 group_id: 1,
3597 is_primary: false,
3598 ..Default::default()
3599 }
3600 },
3601 ]
3602 );
3603}
3604
3605#[gpui::test]
3606async fn test_rename(cx: &mut gpui::TestAppContext) {
3607 init_test(cx);
3608
3609 let fs = FakeFs::new(cx.executor());
3610 fs.insert_tree(
3611 "/dir",
3612 json!({
3613 "one.rs": "const ONE: usize = 1;",
3614 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3615 }),
3616 )
3617 .await;
3618
3619 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3620
3621 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3622 language_registry.add(rust_lang());
3623 let mut fake_servers = language_registry.register_fake_lsp_adapter(
3624 "Rust",
3625 FakeLspAdapter {
3626 capabilities: lsp::ServerCapabilities {
3627 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3628 prepare_provider: Some(true),
3629 work_done_progress_options: Default::default(),
3630 })),
3631 ..Default::default()
3632 },
3633 ..Default::default()
3634 },
3635 );
3636
3637 let buffer = project
3638 .update(cx, |project, cx| {
3639 project.open_local_buffer("/dir/one.rs", cx)
3640 })
3641 .await
3642 .unwrap();
3643
3644 let fake_server = fake_servers.next().await.unwrap();
3645
3646 let response = project.update(cx, |project, cx| {
3647 project.prepare_rename(buffer.clone(), 7, cx)
3648 });
3649 fake_server
3650 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3651 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3652 assert_eq!(params.position, lsp::Position::new(0, 7));
3653 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3654 lsp::Position::new(0, 6),
3655 lsp::Position::new(0, 9),
3656 ))))
3657 })
3658 .next()
3659 .await
3660 .unwrap();
3661 let range = response.await.unwrap().unwrap();
3662 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3663 assert_eq!(range, 6..9);
3664
3665 let response = project.update(cx, |project, cx| {
3666 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3667 });
3668 fake_server
3669 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3670 assert_eq!(
3671 params.text_document_position.text_document.uri.as_str(),
3672 "file:///dir/one.rs"
3673 );
3674 assert_eq!(
3675 params.text_document_position.position,
3676 lsp::Position::new(0, 7)
3677 );
3678 assert_eq!(params.new_name, "THREE");
3679 Ok(Some(lsp::WorkspaceEdit {
3680 changes: Some(
3681 [
3682 (
3683 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3684 vec![lsp::TextEdit::new(
3685 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3686 "THREE".to_string(),
3687 )],
3688 ),
3689 (
3690 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3691 vec![
3692 lsp::TextEdit::new(
3693 lsp::Range::new(
3694 lsp::Position::new(0, 24),
3695 lsp::Position::new(0, 27),
3696 ),
3697 "THREE".to_string(),
3698 ),
3699 lsp::TextEdit::new(
3700 lsp::Range::new(
3701 lsp::Position::new(0, 35),
3702 lsp::Position::new(0, 38),
3703 ),
3704 "THREE".to_string(),
3705 ),
3706 ],
3707 ),
3708 ]
3709 .into_iter()
3710 .collect(),
3711 ),
3712 ..Default::default()
3713 }))
3714 })
3715 .next()
3716 .await
3717 .unwrap();
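    // The rename produced edits in both files; verify the resulting text of each
    // buffer in the project transaction.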
3718 let mut transaction = response.await.unwrap().0;
3719 assert_eq!(transaction.len(), 2);
3720 assert_eq!(
3721 transaction
3722 .remove_entry(&buffer)
3723 .unwrap()
3724 .0
3725 .update(cx, |buffer, _| buffer.text()),
3726 "const THREE: usize = 1;"
3727 );
3728 assert_eq!(
3729 transaction
3730 .into_keys()
3731 .next()
3732 .unwrap()
3733 .update(cx, |buffer, _| buffer.text()),
3734 "const TWO: usize = one::THREE + one::THREE;"
3735 );
3736}
3737
3738#[gpui::test]
3739async fn test_search(cx: &mut gpui::TestAppContext) {
3740 init_test(cx);
3741
3742 let fs = FakeFs::new(cx.executor());
3743 fs.insert_tree(
3744 "/dir",
3745 json!({
3746 "one.rs": "const ONE: usize = 1;",
3747 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3748 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3749 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3750 }),
3751 )
3752 .await;
3753 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3754 assert_eq!(
3755 search(
3756 &project,
3757 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3758 cx
3759 )
3760 .await
3761 .unwrap(),
3762 HashMap::from_iter([
3763 ("dir/two.rs".to_string(), vec![6..9]),
3764 ("dir/three.rs".to_string(), vec![37..40])
3765 ])
3766 );
3767
3768 let buffer_4 = project
3769 .update(cx, |project, cx| {
3770 project.open_local_buffer("/dir/four.rs", cx)
3771 })
3772 .await
3773 .unwrap();
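    // Edit an open buffer so its contents diverge from what's on disk; the search
    // should reflect the buffer's in-memory contents.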
3774 buffer_4.update(cx, |buffer, cx| {
3775 let text = "two::TWO";
3776 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3777 });
3778
3779 assert_eq!(
3780 search(
3781 &project,
3782 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3783 cx
3784 )
3785 .await
3786 .unwrap(),
3787 HashMap::from_iter([
3788 ("dir/two.rs".to_string(), vec![6..9]),
3789 ("dir/three.rs".to_string(), vec![37..40]),
3790 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3791 ])
3792 );
3793}
3794
3795#[gpui::test]
3796async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3797 init_test(cx);
3798
3799 let search_query = "file";
3800
3801 let fs = FakeFs::new(cx.executor());
3802 fs.insert_tree(
3803 "/dir",
3804 json!({
3805 "one.rs": r#"// Rust file one"#,
3806 "one.ts": r#"// TypeScript file one"#,
3807 "two.rs": r#"// Rust file two"#,
3808 "two.ts": r#"// TypeScript file two"#,
3809 }),
3810 )
3811 .await;
3812 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3813
3814 assert!(
3815 search(
3816 &project,
3817 SearchQuery::text(
3818 search_query,
3819 false,
3820 true,
3821 false,
3822 vec![PathMatcher::new("*.odd").unwrap()],
3823 Vec::new()
3824 )
3825 .unwrap(),
3826 cx
3827 )
3828 .await
3829 .unwrap()
3830 .is_empty(),
3831 "If no inclusions match, no files should be returned"
3832 );
3833
3834 assert_eq!(
3835 search(
3836 &project,
3837 SearchQuery::text(
3838 search_query,
3839 false,
3840 true,
3841 false,
3842 vec![PathMatcher::new("*.rs").unwrap()],
3843 Vec::new()
3844 )
3845 .unwrap(),
3846 cx
3847 )
3848 .await
3849 .unwrap(),
3850 HashMap::from_iter([
3851 ("dir/one.rs".to_string(), vec![8..12]),
3852 ("dir/two.rs".to_string(), vec![8..12]),
3853 ]),
3854 "Rust only search should give only Rust files"
3855 );
3856
3857 assert_eq!(
3858 search(
3859 &project,
3860 SearchQuery::text(
3861 search_query,
3862 false,
3863 true,
3864 false,
3865 vec![
3866 PathMatcher::new("*.ts").unwrap(),
3867 PathMatcher::new("*.odd").unwrap(),
3868 ],
3869 Vec::new()
3870 ).unwrap(),
3871 cx
3872 )
3873 .await
3874 .unwrap(),
3875 HashMap::from_iter([
3876 ("dir/one.ts".to_string(), vec![14..18]),
3877 ("dir/two.ts".to_string(), vec![14..18]),
3878 ]),
3879 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
3880 );
3881
3882 assert_eq!(
3883 search(
3884 &project,
3885 SearchQuery::text(
3886 search_query,
3887 false,
3888 true,
3889 false,
3890 vec![
3891 PathMatcher::new("*.rs").unwrap(),
3892 PathMatcher::new("*.ts").unwrap(),
3893 PathMatcher::new("*.odd").unwrap(),
3894 ],
3895 Vec::new()
3896 ).unwrap(),
3897 cx
3898 )
3899 .await
3900 .unwrap(),
3901 HashMap::from_iter([
3902 ("dir/two.ts".to_string(), vec![14..18]),
3903 ("dir/one.rs".to_string(), vec![8..12]),
3904 ("dir/one.ts".to_string(), vec![14..18]),
3905 ("dir/two.rs".to_string(), vec![8..12]),
3906 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
3908 );
3909}
3910
3911#[gpui::test]
3912async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
3913 init_test(cx);
3914
3915 let search_query = "file";
3916
3917 let fs = FakeFs::new(cx.executor());
3918 fs.insert_tree(
3919 "/dir",
3920 json!({
3921 "one.rs": r#"// Rust file one"#,
3922 "one.ts": r#"// TypeScript file one"#,
3923 "two.rs": r#"// Rust file two"#,
3924 "two.ts": r#"// TypeScript file two"#,
3925 }),
3926 )
3927 .await;
3928 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3929
3930 assert_eq!(
3931 search(
3932 &project,
3933 SearchQuery::text(
3934 search_query,
3935 false,
3936 true,
3937 false,
3938 Vec::new(),
3939 vec![PathMatcher::new("*.odd").unwrap()],
3940 )
3941 .unwrap(),
3942 cx
3943 )
3944 .await
3945 .unwrap(),
3946 HashMap::from_iter([
3947 ("dir/one.rs".to_string(), vec![8..12]),
3948 ("dir/one.ts".to_string(), vec![14..18]),
3949 ("dir/two.rs".to_string(), vec![8..12]),
3950 ("dir/two.ts".to_string(), vec![14..18]),
3951 ]),
3952 "If no exclusions match, all files should be returned"
3953 );
3954
3955 assert_eq!(
3956 search(
3957 &project,
3958 SearchQuery::text(
3959 search_query,
3960 false,
3961 true,
3962 false,
3963 Vec::new(),
3964 vec![PathMatcher::new("*.rs").unwrap()],
3965 )
3966 .unwrap(),
3967 cx
3968 )
3969 .await
3970 .unwrap(),
3971 HashMap::from_iter([
3972 ("dir/one.ts".to_string(), vec![14..18]),
3973 ("dir/two.ts".to_string(), vec![14..18]),
3974 ]),
3975 "Rust exclusion search should give only TypeScript files"
3976 );
3977
3978 assert_eq!(
3979 search(
3980 &project,
3981 SearchQuery::text(
3982 search_query,
3983 false,
3984 true,
3985 false,
3986 Vec::new(),
3987 vec![
3988 PathMatcher::new("*.ts").unwrap(),
3989 PathMatcher::new("*.odd").unwrap(),
3990 ],
3991 ).unwrap(),
3992 cx
3993 )
3994 .await
3995 .unwrap(),
3996 HashMap::from_iter([
3997 ("dir/one.rs".to_string(), vec![8..12]),
3998 ("dir/two.rs".to_string(), vec![8..12]),
3999 ]),
4000 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4001 );
4002
4003 assert!(
4004 search(
4005 &project,
4006 SearchQuery::text(
4007 search_query,
4008 false,
4009 true,
4010 false,
4011 Vec::new(),
4012 vec![
4013 PathMatcher::new("*.rs").unwrap(),
4014 PathMatcher::new("*.ts").unwrap(),
4015 PathMatcher::new("*.odd").unwrap(),
4016 ],
4017 ).unwrap(),
4018 cx
4019 )
4020 .await
4021 .unwrap().is_empty(),
        "Rust and TypeScript exclusions should give no files, even if other exclusions don't match anything"
4023 );
4024}
4025
4026#[gpui::test]
4027async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4028 init_test(cx);
4029
4030 let search_query = "file";
4031
4032 let fs = FakeFs::new(cx.executor());
4033 fs.insert_tree(
4034 "/dir",
4035 json!({
4036 "one.rs": r#"// Rust file one"#,
4037 "one.ts": r#"// TypeScript file one"#,
4038 "two.rs": r#"// Rust file two"#,
4039 "two.ts": r#"// TypeScript file two"#,
4040 }),
4041 )
4042 .await;
4043 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4044
4045 assert!(
4046 search(
4047 &project,
4048 SearchQuery::text(
4049 search_query,
4050 false,
4051 true,
4052 false,
4053 vec![PathMatcher::new("*.odd").unwrap()],
4054 vec![PathMatcher::new("*.odd").unwrap()],
4055 )
4056 .unwrap(),
4057 cx
4058 )
4059 .await
4060 .unwrap()
4061 .is_empty(),
4062 "If neither the inclusions nor the exclusions match anything, no files should be returned"
4063 );
4064
4065 assert!(
4066 search(
4067 &project,
4068 SearchQuery::text(
4069 search_query,
4070 false,
4071 true,
4072 false,
4073 vec![PathMatcher::new("*.ts").unwrap()],
4074 vec![PathMatcher::new("*.ts").unwrap()],
4075 ).unwrap(),
4076 cx
4077 )
4078 .await
4079 .unwrap()
4080 .is_empty(),
4081 "If both TypeScript inclusions and exclusions match, exclusions should win and no files should be returned"
4082 );
4083
4084 assert!(
4085 search(
4086 &project,
4087 SearchQuery::text(
4088 search_query,
4089 false,
4090 true,
4091 false,
4092 vec![
4093 PathMatcher::new("*.ts").unwrap(),
4094 PathMatcher::new("*.odd").unwrap()
4095 ],
4096 vec![
4097 PathMatcher::new("*.ts").unwrap(),
4098 PathMatcher::new("*.odd").unwrap()
4099 ],
4100 )
4101 .unwrap(),
4102 cx
4103 )
4104 .await
4105 .unwrap()
4106 .is_empty(),
4107 "Adding non-matching inclusions and exclusions should not change the outcome: exclusions still win and no files are returned"
4108 );
4109
4110 assert_eq!(
4111 search(
4112 &project,
4113 SearchQuery::text(
4114 search_query,
4115 false,
4116 true,
4117 false,
4118 vec![
4119 PathMatcher::new("*.ts").unwrap(),
4120 PathMatcher::new("*.odd").unwrap()
4121 ],
4122 vec![
4123 PathMatcher::new("*.rs").unwrap(),
4124 PathMatcher::new("*.odd").unwrap()
4125 ],
4126 )
4127 .unwrap(),
4128 cx
4129 )
4130 .await
4131 .unwrap(),
4132 HashMap::from_iter([
4133 ("dir/one.ts".to_string(), vec![14..18]),
4134 ("dir/two.ts".to_string(), vec![14..18]),
4135 ]),
4136 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4137 );
4138}
4139
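// Inclusion matchers can be scoped to a single worktree by prefixing the glob with
// the worktree root name ("worktree-a/*.rs"), while an unprefixed glob ("*.ts")
// matches files in every worktree.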
4140#[gpui::test]
4141async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4142 init_test(cx);
4143
4144 let fs = FakeFs::new(cx.executor());
4145 fs.insert_tree(
4146 "/worktree-a",
4147 json!({
4148 "haystack.rs": r#"// NEEDLE"#,
4149 "haystack.ts": r#"// NEEDLE"#,
4150 }),
4151 )
4152 .await;
4153 fs.insert_tree(
4154 "/worktree-b",
4155 json!({
4156 "haystack.rs": r#"// NEEDLE"#,
4157 "haystack.ts": r#"// NEEDLE"#,
4158 }),
4159 )
4160 .await;
4161
4162 let project = Project::test(
4163 fs.clone(),
4164 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4165 cx,
4166 )
4167 .await;
4168
4169 assert_eq!(
4170 search(
4171 &project,
4172 SearchQuery::text(
4173 "NEEDLE",
4174 false,
4175 true,
4176 false,
4177 vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
4178 Vec::new()
4179 )
4180 .unwrap(),
4181 cx
4182 )
4183 .await
4184 .unwrap(),
4185 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4186 "should only return results from included worktree"
4187 );
4188 assert_eq!(
4189 search(
4190 &project,
4191 SearchQuery::text(
4192 "NEEDLE",
4193 false,
4194 true,
4195 false,
4196 vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
4197 Vec::new()
4198 )
4199 .unwrap(),
4200 cx
4201 )
4202 .await
4203 .unwrap(),
4204 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4205 "should only return results from included worktree"
4206 );
4207
4208 assert_eq!(
4209 search(
4210 &project,
4211 SearchQuery::text(
4212 "NEEDLE",
4213 false,
4214 true,
4215 false,
4216 vec![PathMatcher::new("*.ts").unwrap()],
4217 Vec::new()
4218 )
4219 .unwrap(),
4220 cx
4221 )
4222 .await
4223 .unwrap(),
4224 HashMap::from_iter([
4225 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4226 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4227 ]),
4228 "should return results from both worktrees"
4229 );
4230}
4231
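// The last boolean argument to `SearchQuery::text` toggles searching gitignored
// entries: with `false` only non-ignored files are searched, with `true` ignored
// directories such as `target` and `node_modules` are searched too, and the
// inclusion/exclusion matchers still apply to those ignored files.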
4232#[gpui::test]
4233async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4234 init_test(cx);
4235
4236 let fs = FakeFs::new(cx.background_executor.clone());
4237 fs.insert_tree(
4238 "/dir",
4239 json!({
4240 ".git": {},
4241 ".gitignore": "**/target\n/node_modules\n",
4242 "target": {
4243 "index.txt": "index_key:index_value"
4244 },
4245 "node_modules": {
4246 "eslint": {
4247 "index.ts": "const eslint_key = 'eslint value'",
4248 "package.json": r#"{ "some_key": "some value" }"#,
4249 },
4250 "prettier": {
4251 "index.ts": "const prettier_key = 'prettier value'",
4252 "package.json": r#"{ "other_key": "other value" }"#,
4253 },
4254 },
4255 "package.json": r#"{ "main_key": "main value" }"#,
4256 }),
4257 )
4258 .await;
4259 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4260
4261 let query = "key";
4262 assert_eq!(
4263 search(
4264 &project,
4265 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4266 cx
4267 )
4268 .await
4269 .unwrap(),
4270 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4271 "Only one non-ignored file should contain the query"
4272 );
4273
4274 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4275 assert_eq!(
4276 search(
4277 &project,
4278 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4279 cx
4280 )
4281 .await
4282 .unwrap(),
4283 HashMap::from_iter([
4284 ("dir/package.json".to_string(), vec![8..11]),
4285 ("dir/target/index.txt".to_string(), vec![6..9]),
4286 (
4287 "dir/node_modules/prettier/package.json".to_string(),
4288 vec![9..12]
4289 ),
4290 (
4291 "dir/node_modules/prettier/index.ts".to_string(),
4292 vec![15..18]
4293 ),
4294 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4295 (
4296 "dir/node_modules/eslint/package.json".to_string(),
4297 vec![8..11]
4298 ),
4299 ]),
4300 "Unrestricted search with ignored directories should find every file that contains the query"
4301 );
4302
4303 let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
4304 let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
4305 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4306 assert_eq!(
4307 search(
4308 &project,
4309 SearchQuery::text(
4310 query,
4311 false,
4312 false,
4313 true,
4314 files_to_include,
4315 files_to_exclude,
4316 )
4317 .unwrap(),
4318 cx
4319 )
4320 .await
4321 .unwrap(),
4322 HashMap::from_iter([(
4323 "dir/node_modules/prettier/package.json".to_string(),
4324 vec![9..12]
4325 )]),
4326 "A search that includes the ignored prettier directory but excludes TS files should find only one file"
4327 );
4328}
4329
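// `glob_literal_prefix` appears to return the longest leading run of path
// components that contain no glob metacharacters: "" when the pattern starts with
// a wildcard, and the whole path when it contains none.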
4330#[test]
4331fn test_glob_literal_prefix() {
4332 assert_eq!(glob_literal_prefix("**/*.js"), "");
4333 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4334 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4335 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4336}
4337
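// `create_entry` should accept literal names that merely contain dots (like "b.."),
// but reject paths that contain ".." components or would escape the worktree.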
4338#[gpui::test]
4339async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4340 init_test(cx);
4341
4342 let fs = FakeFs::new(cx.executor().clone());
4343 fs.insert_tree(
4344 "/one/two",
4345 json!({
4346 "three": {
4347 "a.txt": "",
4348 "four": {}
4349 },
4350 "c.rs": ""
4351 }),
4352 )
4353 .await;
4354
4355 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4356 project
4357 .update(cx, |project, cx| {
4358 let id = project.worktrees().next().unwrap().read(cx).id();
4359 project.create_entry((id, "b.."), true, cx)
4360 })
4361 .unwrap()
4362 .await
4363 .unwrap();
4364
4365 // Can't create paths outside the project
4366 let result = project
4367 .update(cx, |project, cx| {
4368 let id = project.worktrees().next().unwrap().read(cx).id();
4369 project.create_entry((id, "../../boop"), true, cx)
4370 })
4371 .await;
4372 assert!(result.is_err());
4373
4374 // Can't create paths with '..'
4375 let result = project
4376 .update(cx, |project, cx| {
4377 let id = project.worktrees().next().unwrap().read(cx).id();
4378 project.create_entry((id, "four/../beep"), true, cx)
4379 })
4380 .await;
4381 assert!(result.is_err());
4382
4383 assert_eq!(
4384 fs.paths(true),
4385 vec![
4386 PathBuf::from("/"),
4387 PathBuf::from("/one"),
4388 PathBuf::from("/one/two"),
4389 PathBuf::from("/one/two/c.rs"),
4390 PathBuf::from("/one/two/three"),
4391 PathBuf::from("/one/two/three/a.txt"),
4392 PathBuf::from("/one/two/three/b.."),
4393 PathBuf::from("/one/two/three/four"),
4394 ]
4395 );
4396
4397 // Can't open buffers with '..' either
4398 let result = project
4399 .update(cx, |project, cx| {
4400 let id = project.worktrees().next().unwrap().read(cx).id();
4401 project.open_buffer((id, "../c.rs"), cx)
4402 })
4403 .await;
4404 assert!(result.is_err());
4405}
4406
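// Registers several fake language servers for the same "tsx" language: only the
// servers that advertise a hover capability should be queried, and servers that
// answer with `None` should be dropped from the merged hover result.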
4407#[gpui::test]
4408async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4409 init_test(cx);
4410
4411 let fs = FakeFs::new(cx.executor());
4412 fs.insert_tree(
4413 "/dir",
4414 json!({
4415 "a.tsx": "a",
4416 }),
4417 )
4418 .await;
4419
4420 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4421
4422 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4423 language_registry.add(tsx_lang());
4424 let language_server_names = [
4425 "TypeScriptServer",
4426 "TailwindServer",
4427 "ESLintServer",
4428 "NoHoverCapabilitiesServer",
4429 ];
4430 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4431 "tsx",
4432 true,
4433 FakeLspAdapter {
4434 name: &language_server_names[0],
4435 capabilities: lsp::ServerCapabilities {
4436 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4437 ..lsp::ServerCapabilities::default()
4438 },
4439 ..FakeLspAdapter::default()
4440 },
4441 );
4442 let _a = language_registry.register_specific_fake_lsp_adapter(
4443 "tsx",
4444 false,
4445 FakeLspAdapter {
4446 name: &language_server_names[1],
4447 capabilities: lsp::ServerCapabilities {
4448 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4449 ..lsp::ServerCapabilities::default()
4450 },
4451 ..FakeLspAdapter::default()
4452 },
4453 );
4454 let _b = language_registry.register_specific_fake_lsp_adapter(
4455 "tsx",
4456 false,
4457 FakeLspAdapter {
4458 name: &language_server_names[2],
4459 capabilities: lsp::ServerCapabilities {
4460 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4461 ..lsp::ServerCapabilities::default()
4462 },
4463 ..FakeLspAdapter::default()
4464 },
4465 );
4466 let _c = language_registry.register_specific_fake_lsp_adapter(
4467 "tsx",
4468 false,
4469 FakeLspAdapter {
4470 name: &language_server_names[3],
4471 capabilities: lsp::ServerCapabilities {
4472 hover_provider: None,
4473 ..lsp::ServerCapabilities::default()
4474 },
4475 ..FakeLspAdapter::default()
4476 },
4477 );
4478
4479 let buffer = project
4480 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4481 .await
4482 .unwrap();
4483 cx.executor().run_until_parked();
4484
4485 let mut servers_with_hover_requests = HashMap::default();
4486 for i in 0..language_server_names.len() {
4487 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4488 panic!(
4489 "Failed to get language server #{i} with name {}",
4490 &language_server_names[i]
4491 )
4492 });
4493 let new_server_name = new_server.server.name();
4494 assert!(
4495 !servers_with_hover_requests.contains_key(new_server_name),
4496 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4497 );
4498 let new_server_name = new_server_name.to_string();
4499 match new_server_name.as_str() {
4500 "TailwindServer" | "TypeScriptServer" => {
4501 servers_with_hover_requests.insert(
4502 new_server_name.clone(),
4503 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4504 let name = new_server_name.clone();
4505 async move {
4506 Ok(Some(lsp::Hover {
4507 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4508 format!("{name} hover"),
4509 )),
4510 range: None,
4511 }))
4512 }
4513 }),
4514 );
4515 }
4516 "ESLintServer" => {
4517 servers_with_hover_requests.insert(
4518 new_server_name,
4519 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4520 |_, _| async move { Ok(None) },
4521 ),
4522 );
4523 }
4524 "NoHoverCapabilitiesServer" => {
4525 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4526 |_, _| async move {
4527 panic!(
4528 "Should not request hovers from a server without hover capabilities"
4529 )
4530 },
4531 );
4532 }
4533 unexpected => panic!("Unexpected server name: {unexpected}"),
4534 }
4535 }
4536
4537 let hover_task = project.update(cx, |project, cx| {
4538 project.hover(&buffer, Point::new(0, 0), cx)
4539 });
4540 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4541 |mut hover_request| async move {
4542 hover_request
4543 .next()
4544 .await
4545 .expect("All hover requests should have been triggered")
4546 },
4547 ))
4548 .await;
4549 assert_eq!(
4550 vec!["TailwindServer hover", "TypeScriptServer hover"],
4551 hover_task
4552 .await
4553 .into_iter()
4554 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4555 .sorted()
4556 .collect::<Vec<_>>(),
4557 "Should receive hover responses from all related servers with hover capabilities"
4558 );
4559}
4560
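// Hover parts that are empty or contain only whitespace should be discarded
// entirely instead of producing blank hover blocks.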
4561#[gpui::test]
4562async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4563 init_test(cx);
4564
4565 let fs = FakeFs::new(cx.executor());
4566 fs.insert_tree(
4567 "/dir",
4568 json!({
4569 "a.ts": "a",
4570 }),
4571 )
4572 .await;
4573
4574 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4575
4576 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4577 language_registry.add(typescript_lang());
4578 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4579 "TypeScript",
4580 FakeLspAdapter {
4581 capabilities: lsp::ServerCapabilities {
4582 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4583 ..lsp::ServerCapabilities::default()
4584 },
4585 ..FakeLspAdapter::default()
4586 },
4587 );
4588
4589 let buffer = project
4590 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4591 .await
4592 .unwrap();
4593 cx.executor().run_until_parked();
4594
4595 let fake_server = fake_language_servers
4596 .next()
4597 .await
4598 .expect("failed to get the language server");
4599
4600 let mut request_handled =
4601 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4602 Ok(Some(lsp::Hover {
4603 contents: lsp::HoverContents::Array(vec![
4604 lsp::MarkedString::String("".to_string()),
4605 lsp::MarkedString::String(" ".to_string()),
4606 lsp::MarkedString::String("\n\n\n".to_string()),
4607 ]),
4608 range: None,
4609 }))
4610 });
4611
4612 let hover_task = project.update(cx, |project, cx| {
4613 project.hover(&buffer, Point::new(0, 0), cx)
4614 });
4615 let () = request_handled
4616 .next()
4617 .await
4618 .expect("All hover requests should have been triggered");
4619 assert_eq!(
4620 Vec::<String>::new(),
4621 hover_task
4622 .await
4623 .into_iter()
4624 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4625 .sorted()
4626 .collect::<Vec<_>>(),
4627 "Empty hover parts should be ignored"
4628 );
4629}
4630
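// Mirrors the multi-server hover test above, but for code actions: only servers
// that advertise a code action capability should be queried, and `None` responses
// should not contribute any actions.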
4631#[gpui::test]
4632async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4633 init_test(cx);
4634
4635 let fs = FakeFs::new(cx.executor());
4636 fs.insert_tree(
4637 "/dir",
4638 json!({
4639 "a.tsx": "a",
4640 }),
4641 )
4642 .await;
4643
4644 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4645
4646 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4647 language_registry.add(tsx_lang());
4648 let language_server_names = [
4649 "TypeScriptServer",
4650 "TailwindServer",
4651 "ESLintServer",
4652 "NoActionsCapabilitiesServer",
4653 ];
4654 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4655 "tsx",
4656 true,
4657 FakeLspAdapter {
4658 name: &language_server_names[0],
4659 capabilities: lsp::ServerCapabilities {
4660 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4661 ..lsp::ServerCapabilities::default()
4662 },
4663 ..FakeLspAdapter::default()
4664 },
4665 );
4666 let _a = language_registry.register_specific_fake_lsp_adapter(
4667 "tsx",
4668 false,
4669 FakeLspAdapter {
4670 name: &language_server_names[1],
4671 capabilities: lsp::ServerCapabilities {
4672 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4673 ..lsp::ServerCapabilities::default()
4674 },
4675 ..FakeLspAdapter::default()
4676 },
4677 );
4678 let _b = language_registry.register_specific_fake_lsp_adapter(
4679 "tsx",
4680 false,
4681 FakeLspAdapter {
4682 name: &language_server_names[2],
4683 capabilities: lsp::ServerCapabilities {
4684 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4685 ..lsp::ServerCapabilities::default()
4686 },
4687 ..FakeLspAdapter::default()
4688 },
4689 );
4690 let _c = language_registry.register_specific_fake_lsp_adapter(
4691 "tsx",
4692 false,
4693 FakeLspAdapter {
4694 name: &language_server_names[3],
4695 capabilities: lsp::ServerCapabilities {
4696 code_action_provider: None,
4697 ..lsp::ServerCapabilities::default()
4698 },
4699 ..FakeLspAdapter::default()
4700 },
4701 );
4702
4703 let buffer = project
4704 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4705 .await
4706 .unwrap();
4707 cx.executor().run_until_parked();
4708
4709 let mut servers_with_actions_requests = HashMap::default();
4710 for i in 0..language_server_names.len() {
4711 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4712 panic!(
4713 "Failed to get language server #{i} with name {}",
4714 &language_server_names[i]
4715 )
4716 });
4717 let new_server_name = new_server.server.name();
4718 assert!(
4719 !servers_with_actions_requests.contains_key(new_server_name),
4720 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4721 );
4722 let new_server_name = new_server_name.to_string();
4723 match new_server_name.as_str() {
4724 "TailwindServer" | "TypeScriptServer" => {
4725 servers_with_actions_requests.insert(
4726 new_server_name.clone(),
4727 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4728 move |_, _| {
4729 let name = new_server_name.clone();
4730 async move {
4731 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4732 lsp::CodeAction {
4733 title: format!("{name} code action"),
4734 ..lsp::CodeAction::default()
4735 },
4736 )]))
4737 }
4738 },
4739 ),
4740 );
4741 }
4742 "ESLintServer" => {
4743 servers_with_actions_requests.insert(
4744 new_server_name,
4745 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4746 |_, _| async move { Ok(None) },
4747 ),
4748 );
4749 }
4750 "NoActionsCapabilitiesServer" => {
4751 let _never_handled = new_server
4752 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4753 panic!(
4754 "Should not request code actions from a server without code action capabilities"
4755 )
4756 });
4757 }
4758 unexpected => panic!("Unexpected server name: {unexpected}"),
4759 }
4760 }
4761
4762 let code_actions_task = project.update(cx, |project, cx| {
4763 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4764 });
4765 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4766 |mut code_actions_request| async move {
4767 code_actions_request
4768 .next()
4769 .await
4770 .expect("All code actions requests should have been triggered")
4771 },
4772 ))
4773 .await;
4774 assert_eq!(
4775 vec!["TailwindServer code action", "TypeScriptServer code action"],
4776 code_actions_task
4777 .await
4778 .into_iter()
4779 .map(|code_action| code_action.lsp_action.title)
4780 .sorted()
4781 .collect::<Vec<_>>(),
4782 "Should receive code action responses from all related servers with code action capabilities"
4783 );
4784}
4785
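// Test helper: runs `project.search` and drains the result stream into a map from
// each matching buffer's full path (e.g. "dir/one.rs") to the offset ranges of
// every match in that buffer. `LimitReached` markers are ignored.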
4786async fn search(
4787 project: &Model<Project>,
4788 query: SearchQuery,
4789 cx: &mut gpui::TestAppContext,
4790) -> Result<HashMap<String, Vec<Range<usize>>>> {
4791 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4792 let mut results = HashMap::default();
4793 while let Some(search_result) = search_rx.next().await {
4794 match search_result {
4795 SearchResult::Buffer { buffer, ranges } => {
4796 results.entry(buffer).or_insert(ranges);
4797 }
4798 SearchResult::LimitReached => {}
4799 }
4800 }
4801 Ok(results
4802 .into_iter()
4803 .map(|(buffer, ranges)| {
4804 buffer.update(cx, |buffer, cx| {
4805 let path = buffer
4806 .file()
4807 .unwrap()
4808 .full_path(cx)
4809 .to_string_lossy()
4810 .to_string();
4811 let ranges = ranges
4812 .into_iter()
4813 .map(|range| range.to_offset(buffer))
4814 .collect::<Vec<_>>();
4815 (path, ranges)
4816 })
4817 })
4818 .collect())
4819}
4820
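// Shared test setup: installs a test `SettingsStore`, the release channel, and the
// language and project settings that the tests above rely on.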
4821fn init_test(cx: &mut gpui::TestAppContext) {
4822 if std::env::var("RUST_LOG").is_ok() {
4823 env_logger::try_init().ok();
4824 }
4825
4826 cx.update(|cx| {
4827 let settings_store = SettingsStore::test(cx);
4828 cx.set_global(settings_store);
4829 release_channel::init("0.0.0", cx);
4830 language::init(cx);
4831 Project::init_settings(cx);
4832 });
4833}
4834
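// Minimal language definitions used by the tests above; only the name, the path
// suffix matcher, and (where needed) the tree-sitter grammar are configured.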
4835fn json_lang() -> Arc<Language> {
4836 Arc::new(Language::new(
4837 LanguageConfig {
4838 name: "JSON".into(),
4839 matcher: LanguageMatcher {
4840 path_suffixes: vec!["json".to_string()],
4841 ..Default::default()
4842 },
4843 ..Default::default()
4844 },
4845 None,
4846 ))
4847}
4848
4849fn js_lang() -> Arc<Language> {
4850 Arc::new(Language::new(
4851 LanguageConfig {
4852 name: "JavaScript".into(),
4853 matcher: LanguageMatcher {
4854 path_suffixes: vec!["js".to_string()],
4855 ..Default::default()
4856 },
4857 ..Default::default()
4858 },
4859 None,
4860 ))
4861}
4862
4863fn rust_lang() -> Arc<Language> {
4864 Arc::new(Language::new(
4865 LanguageConfig {
4866 name: "Rust".into(),
4867 matcher: LanguageMatcher {
4868 path_suffixes: vec!["rs".to_string()],
4869 ..Default::default()
4870 },
4871 ..Default::default()
4872 },
4873 Some(tree_sitter_rust::language()),
4874 ))
4875}
4876
4877fn typescript_lang() -> Arc<Language> {
4878 Arc::new(Language::new(
4879 LanguageConfig {
4880 name: "TypeScript".into(),
4881 matcher: LanguageMatcher {
4882 path_suffixes: vec!["ts".to_string()],
4883 ..Default::default()
4884 },
4885 ..Default::default()
4886 },
4887 Some(tree_sitter_typescript::language_typescript()),
4888 ))
4889}
4890
4891fn tsx_lang() -> Arc<Language> {
4892 Arc::new(Language::new(
4893 LanguageConfig {
4894 name: "tsx".into(),
4895 matcher: LanguageMatcher {
4896 path_suffixes: vec!["tsx".to_string()],
4897 ..Default::default()
4898 },
4899 ..Default::default()
4900 },
4901 Some(tree_sitter_typescript::language_tsx()),
4902 ))
4903}