1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{TaskContext, TaskSource, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20use worktree::WorktreeModelHandle as _;
21
22#[gpui::test]
23async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
24 cx.executor().allow_parking();
25
26 let (tx, mut rx) = futures::channel::mpsc::unbounded();
27 let _thread = std::thread::spawn(move || {
28 std::fs::metadata("/Users").unwrap();
29 std::thread::sleep(Duration::from_millis(1000));
30 tx.unbounded_send(1).unwrap();
31 });
32 rx.next().await.unwrap();
33}
34
35#[gpui::test]
36async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let io_task = smol::unblock(move || {
40 println!("sleeping on thread {:?}", std::thread::current().id());
41 std::thread::sleep(Duration::from_millis(10));
42 1
43 });
44
45 let task = cx.foreground_executor().spawn(async move {
46 io_task.await;
47 });
48
49 task.await;
50}
51
52#[cfg(not(windows))]
53#[gpui::test]
54async fn test_symlinks(cx: &mut gpui::TestAppContext) {
55 init_test(cx);
56 cx.executor().allow_parking();
57
58 let dir = temp_tree(json!({
59 "root": {
60 "apple": "",
61 "banana": {
62 "carrot": {
63 "date": "",
64 "endive": "",
65 }
66 },
67 "fennel": {
68 "grape": "",
69 }
70 }
71 }));
72
73 let root_link_path = dir.path().join("root_link");
74 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
75 os::unix::fs::symlink(
76 &dir.path().join("root/fennel"),
77 &dir.path().join("root/finnochio"),
78 )
79 .unwrap();
80
81 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
82
83 project.update(cx, |project, cx| {
84 let tree = project.worktrees().next().unwrap().read(cx);
85 assert_eq!(tree.file_count(), 5);
86 assert_eq!(
87 tree.inode_for_path("fennel/grape"),
88 tree.inode_for_path("finnochio/grape")
89 );
90 });
91}
92
93#[gpui::test]
94async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
95 init_test(cx);
96
97 let fs = FakeFs::new(cx.executor());
98 fs.insert_tree(
99 "/the-root",
100 json!({
101 ".zed": {
102 "settings.json": r#"{ "tab_size": 8 }"#,
103 "tasks.json": r#"[{
104 "label": "cargo check",
105 "command": "cargo",
106 "args": ["check", "--all"]
107 },]"#,
108 },
109 "a": {
110 "a.rs": "fn a() {\n A\n}"
111 },
112 "b": {
113 ".zed": {
114 "settings.json": r#"{ "tab_size": 2 }"#,
115 "tasks.json": r#"[{
116 "label": "cargo check",
117 "command": "cargo",
118 "args": ["check"]
119 },]"#,
120 },
121 "b.rs": "fn b() {\n B\n}"
122 }
123 }),
124 )
125 .await;
126
127 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
128 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
129 let task_context = TaskContext::default();
130
131 cx.executor().run_until_parked();
132 let workree_id = cx.update(|cx| {
133 project.update(cx, |project, cx| {
134 project.worktrees().next().unwrap().read(cx).id()
135 })
136 });
137 let global_task_source_kind = TaskSourceKind::Worktree {
138 id: workree_id,
139 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
140 id_base: "local_tasks_for_worktree",
141 };
142 cx.update(|cx| {
143 let tree = worktree.read(cx);
144
145 let settings_a = language_settings(
146 None,
147 Some(
148 &(File::for_entry(
149 tree.entry_for_path("a/a.rs").unwrap().clone(),
150 worktree.clone(),
151 ) as _),
152 ),
153 cx,
154 );
155 let settings_b = language_settings(
156 None,
157 Some(
158 &(File::for_entry(
159 tree.entry_for_path("b/b.rs").unwrap().clone(),
160 worktree.clone(),
161 ) as _),
162 ),
163 cx,
164 );
165
166 assert_eq!(settings_a.tab_size.get(), 8);
167 assert_eq!(settings_b.tab_size.get(), 2);
168
169 let all_tasks = project
170 .update(cx, |project, cx| {
171 project.task_inventory().update(cx, |inventory, cx| {
172 let (mut old, new) = inventory.used_and_current_resolved_tasks(
173 None,
174 Some(workree_id),
175 &task_context,
176 cx,
177 );
178 old.extend(new);
179 old
180 })
181 })
182 .into_iter()
183 .map(|(source_kind, task)| {
184 let resolved = task.resolved.unwrap();
185 (
186 source_kind,
187 task.resolved_label,
188 resolved.args,
189 resolved.env,
190 )
191 })
192 .collect::<Vec<_>>();
193 assert_eq!(
194 all_tasks,
195 vec![
196 (
197 TaskSourceKind::Worktree {
198 id: workree_id,
199 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
200 id_base: "local_tasks_for_worktree",
201 },
202 "cargo check".to_string(),
203 vec!["check".to_string()],
204 HashMap::default(),
205 ),
206 (
207 global_task_source_kind.clone(),
208 "cargo check".to_string(),
209 vec!["check".to_string(), "--all".to_string()],
210 HashMap::default(),
211 ),
212 ]
213 );
214 });
215
216 project.update(cx, |project, cx| {
217 let inventory = project.task_inventory();
218 inventory.update(cx, |inventory, cx| {
219 let (mut old, new) = inventory.used_and_current_resolved_tasks(
220 None,
221 Some(workree_id),
222 &task_context,
223 cx,
224 );
225 old.extend(new);
226 let (_, resolved_task) = old
227 .into_iter()
228 .find(|(source_kind, _)| source_kind == &global_task_source_kind)
229 .expect("should have one global task");
230 inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
231 })
232 });
233
234 cx.update(|cx| {
235 let all_tasks = project
236 .update(cx, |project, cx| {
237 project.task_inventory().update(cx, |inventory, cx| {
238 inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
239 inventory.add_source(
240 global_task_source_kind.clone(),
241 |cx| {
242 cx.new_model(|_| {
243 let source = TestTaskSource {
244 tasks: TaskTemplates(vec![TaskTemplate {
245 label: "cargo check".to_string(),
246 command: "cargo".to_string(),
247 args: vec![
248 "check".to_string(),
249 "--all".to_string(),
250 "--all-targets".to_string(),
251 ],
252 env: HashMap::from_iter(Some((
253 "RUSTFLAGS".to_string(),
254 "-Zunstable-options".to_string(),
255 ))),
256 ..TaskTemplate::default()
257 }]),
258 };
259 Box::new(source) as Box<_>
260 })
261 },
262 cx,
263 );
264 let (mut old, new) = inventory.used_and_current_resolved_tasks(
265 None,
266 Some(workree_id),
267 &task_context,
268 cx,
269 );
270 old.extend(new);
271 old
272 })
273 })
274 .into_iter()
275 .map(|(source_kind, task)| {
276 let resolved = task.resolved.unwrap();
277 (
278 source_kind,
279 task.resolved_label,
280 resolved.args,
281 resolved.env,
282 )
283 })
284 .collect::<Vec<_>>();
285 assert_eq!(
286 all_tasks,
287 vec![
288 (
289 TaskSourceKind::Worktree {
290 id: workree_id,
291 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
292 id_base: "local_tasks_for_worktree",
293 },
294 "cargo check".to_string(),
295 vec!["check".to_string()],
296 HashMap::default(),
297 ),
298 (
299 TaskSourceKind::Worktree {
300 id: workree_id,
301 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
302 id_base: "local_tasks_for_worktree",
303 },
304 "cargo check".to_string(),
305 vec![
306 "check".to_string(),
307 "--all".to_string(),
308 "--all-targets".to_string()
309 ],
310 HashMap::from_iter(Some((
311 "RUSTFLAGS".to_string(),
312 "-Zunstable-options".to_string()
313 ))),
314 ),
315 ]
316 );
317 });
318}
319
/// A [`TaskSource`] test double that serves a fixed, in-memory set of task
/// templates instead of reading them from disk.
struct TestTaskSource {
    // Templates returned verbatim (cloned) from `tasks_to_schedule`.
    tasks: TaskTemplates,
}

impl TaskSource for TestTaskSource {
    fn as_any(&mut self) -> &mut dyn std::any::Any {
        self
    }

    // Returns a clone of the configured templates; the context is unused.
    fn tasks_to_schedule(&mut self, _: &mut ModelContext<Box<dyn TaskSource>>) -> TaskTemplates {
        self.tasks.clone()
    }
}
333
/// End-to-end exercise of language-server lifecycle management: servers start
/// lazily when a matching buffer is opened, receive open/change/save/close
/// notifications only for buffers of their language, follow files across
/// renames (including a rename that changes the file's language), and are
/// restarted on demand with all documents reopened on the new server.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake LSP adapters let the test intercept each server's traffic; the
    // advertised completion trigger characters are asserted against below.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Attach a diagnostic to the buffer so we can observe it being cleared
    // when the file's language (and server) changes below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    // (compared as a set, since the two open notifications may arrive in either order).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
704
/// Verifies that filesystem events are forwarded to a language server
/// according to the glob watchers it registers via
/// `workspace/didChangeWatchedFiles`, and that watching a path inside a
/// gitignored directory causes that directory to be loaded into the worktree.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for asserting how many extra directory scans the watcher causes.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        // One watcher for a file that doesn't exist, one glob over
                        // `src`, and one glob inside the ignored `target` dir.
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            // Accumulate events sorted by URI so assertions are order-independent.
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // NOTE(review): 4 appears to be the number of read_dir calls needed to load
    // the watched `target/y` subtree — confirm if the worktree scanner changes.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
898
899#[gpui::test]
900async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
901 init_test(cx);
902
903 let fs = FakeFs::new(cx.executor());
904 fs.insert_tree(
905 "/dir",
906 json!({
907 "a.rs": "let a = 1;",
908 "b.rs": "let b = 2;"
909 }),
910 )
911 .await;
912
913 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
914
915 let buffer_a = project
916 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
917 .await
918 .unwrap();
919 let buffer_b = project
920 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
921 .await
922 .unwrap();
923
924 project.update(cx, |project, cx| {
925 project
926 .update_diagnostics(
927 LanguageServerId(0),
928 lsp::PublishDiagnosticsParams {
929 uri: Url::from_file_path("/dir/a.rs").unwrap(),
930 version: None,
931 diagnostics: vec![lsp::Diagnostic {
932 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
933 severity: Some(lsp::DiagnosticSeverity::ERROR),
934 message: "error 1".to_string(),
935 ..Default::default()
936 }],
937 },
938 &[],
939 cx,
940 )
941 .unwrap();
942 project
943 .update_diagnostics(
944 LanguageServerId(0),
945 lsp::PublishDiagnosticsParams {
946 uri: Url::from_file_path("/dir/b.rs").unwrap(),
947 version: None,
948 diagnostics: vec![lsp::Diagnostic {
949 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
950 severity: Some(lsp::DiagnosticSeverity::WARNING),
951 message: "error 2".to_string(),
952 ..Default::default()
953 }],
954 },
955 &[],
956 cx,
957 )
958 .unwrap();
959 });
960
961 buffer_a.update(cx, |buffer, _| {
962 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
963 assert_eq!(
964 chunks
965 .iter()
966 .map(|(s, d)| (s.as_str(), *d))
967 .collect::<Vec<_>>(),
968 &[
969 ("let ", None),
970 ("a", Some(DiagnosticSeverity::ERROR)),
971 (" = 1;", None),
972 ]
973 );
974 });
975 buffer_b.update(cx, |buffer, _| {
976 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
977 assert_eq!(
978 chunks
979 .iter()
980 .map(|(s, d)| (s.as_str(), *d))
981 .collect::<Vec<_>>(),
982 &[
983 ("let ", None),
984 ("b", Some(DiagnosticSeverity::WARNING)),
985 (" = 2;", None),
986 ]
987 );
988 });
989}
990
991#[gpui::test]
992async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
993 init_test(cx);
994
995 let fs = FakeFs::new(cx.executor());
996 fs.insert_tree(
997 "/root",
998 json!({
999 "dir": {
1000 ".git": {
1001 "HEAD": "ref: refs/heads/main",
1002 },
1003 ".gitignore": "b.rs",
1004 "a.rs": "let a = 1;",
1005 "b.rs": "let b = 2;",
1006 },
1007 "other.rs": "let b = c;"
1008 }),
1009 )
1010 .await;
1011
1012 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1013 let (worktree, _) = project
1014 .update(cx, |project, cx| {
1015 project.find_or_create_local_worktree("/root/dir", true, cx)
1016 })
1017 .await
1018 .unwrap();
1019 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1020
1021 let (worktree, _) = project
1022 .update(cx, |project, cx| {
1023 project.find_or_create_local_worktree("/root/other.rs", false, cx)
1024 })
1025 .await
1026 .unwrap();
1027 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1028
1029 let server_id = LanguageServerId(0);
1030 project.update(cx, |project, cx| {
1031 project
1032 .update_diagnostics(
1033 server_id,
1034 lsp::PublishDiagnosticsParams {
1035 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1036 version: None,
1037 diagnostics: vec![lsp::Diagnostic {
1038 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1039 severity: Some(lsp::DiagnosticSeverity::ERROR),
1040 message: "unused variable 'b'".to_string(),
1041 ..Default::default()
1042 }],
1043 },
1044 &[],
1045 cx,
1046 )
1047 .unwrap();
1048 project
1049 .update_diagnostics(
1050 server_id,
1051 lsp::PublishDiagnosticsParams {
1052 uri: Url::from_file_path("/root/other.rs").unwrap(),
1053 version: None,
1054 diagnostics: vec![lsp::Diagnostic {
1055 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1056 severity: Some(lsp::DiagnosticSeverity::ERROR),
1057 message: "unknown variable 'c'".to_string(),
1058 ..Default::default()
1059 }],
1060 },
1061 &[],
1062 cx,
1063 )
1064 .unwrap();
1065 });
1066
1067 let main_ignored_buffer = project
1068 .update(cx, |project, cx| {
1069 project.open_buffer((main_worktree_id, "b.rs"), cx)
1070 })
1071 .await
1072 .unwrap();
1073 main_ignored_buffer.update(cx, |buffer, _| {
1074 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1075 assert_eq!(
1076 chunks
1077 .iter()
1078 .map(|(s, d)| (s.as_str(), *d))
1079 .collect::<Vec<_>>(),
1080 &[
1081 ("let ", None),
1082 ("b", Some(DiagnosticSeverity::ERROR)),
1083 (" = 2;", None),
1084 ],
1085 "Gigitnored buffers should still get in-buffer diagnostics",
1086 );
1087 });
1088 let other_buffer = project
1089 .update(cx, |project, cx| {
1090 project.open_buffer((other_worktree_id, ""), cx)
1091 })
1092 .await
1093 .unwrap();
1094 other_buffer.update(cx, |buffer, _| {
1095 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1096 assert_eq!(
1097 chunks
1098 .iter()
1099 .map(|(s, d)| (s.as_str(), *d))
1100 .collect::<Vec<_>>(),
1101 &[
1102 ("let b = ", None),
1103 ("c", Some(DiagnosticSeverity::ERROR)),
1104 (";", None),
1105 ],
1106 "Buffers from hidden projects should still get in-buffer diagnostics"
1107 );
1108 });
1109
1110 project.update(cx, |project, cx| {
1111 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1112 assert_eq!(
1113 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1114 vec![(
1115 ProjectPath {
1116 worktree_id: main_worktree_id,
1117 path: Arc::from(Path::new("b.rs")),
1118 },
1119 server_id,
1120 DiagnosticSummary {
1121 error_count: 1,
1122 warning_count: 0,
1123 }
1124 )]
1125 );
1126 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1127 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1128 });
1129}
1130
// Verifies the event sequence emitted around a disk-based diagnostics pass:
// LanguageServerAdded, then DiskBasedDiagnosticsStarted when the server begins
// progress under the configured token, DiagnosticsUpdated when diagnostics are
// published, and DiskBasedDiagnosticsFinished when progress ends. Also checks
// that publishing the same (empty) diagnostics twice yields only one
// DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake server whose progress under `progress_token` counts as a
    // disk-based diagnostics pass.
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Beginning progress on the disk-based token starts a diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic for a file that is not yet open in any buffer.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the file afterwards shows the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // The second identical (empty) publish must not produce another event.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1259
// Verifies that restarting a language server while its disk-based diagnostics
// pass is still in progress does not leave the old server's pass "stuck":
// events and the running-diagnostics set track only the new server instance
// (LanguageServerId(1)).
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server should be reported as running diagnostics.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // Empty: `[LanguageServerId(0); 0]` is a zero-length array.
            [LanguageServerId(0); 0]
        );
    });
}
1338
// Verifies that diagnostics already published by a language server — both the
// in-buffer entries and the project-level summary counts — are cleared when
// that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    cx.executor().run_until_parked();
    // The diagnostic appears in the buffer and in the project summary.
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1418
// Verifies that a stale diagnostic report carrying an unknown (too-new) buffer
// version does not corrupt version tracking: after restarting the server, the
// buffer is re-opened with the server starting from version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        // No buffer has ever reached version 10000 here.
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The freshly started server re-opens the document at version 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1457
// Verifies that toggling the per-language `enable_language_server` setting
// stops and restarts only the affected server: disabling Rust exits the Rust
// server while the JavaScript server keeps running, and re-enabling Rust while
// disabling JavaScript swaps which server is alive.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1571
// Verifies that diagnostics published against an OLDER document version are
// translated through the buffer edits made since that version: positions shift
// with insertions, overlapping diagnostics are rendered correctly, and
// out-of-order/versioned reports land on the right text even after further
// unsaved edits.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The "\n\n" prepended above shifts every diagnostic down two rows.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the higher-severity (ERROR) diagnostic wins the
        // chunk's severity; the warning covers the remainder of its range.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1851
// Verifies how zero-width diagnostic ranges are expanded for display: an empty
// range is widened to include the following character, or — at end of line —
// the preceding character, so the diagnostic is always visibly highlighted.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    // Empty range mid-line, followed by ";".
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    // Empty range at the trailing space of a line.
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1920
// Verifies that diagnostics reported for the same path by two different
// language servers are kept separately and that their counts are summed in the
// project-wide diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        // One error from server 0...
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and one from server 1, over the same range of the same file.
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both errors count toward the summary.
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
1976
// Verifies that `edits_from_lsp` correctly interprets edits computed against a
// PAST document version: the LSP positions are resolved in the old version's
// coordinates and then carried through the buffer edits made since, so
// applying them produces the intended result in the current text.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server "computed its edits" against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All ranges below are in the coordinates of `lsp_document_version`,
    // i.e. the text BEFORE the three edits above.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the interleaved comments.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2129
// Verifies that `edits_from_lsp` minimizes a "rewrite everything" style LSP
// diff (as rust-analyzer emits for merge-imports) down to the small set of
// actual changes, so unchanged text is never touched.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four-edit diff collapses into two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2237
// Verifies that `edits_from_lsp` tolerates malformed server edits: unsorted
// order, inverted ranges (end before start), and ranges extending past the end
// of the document — producing the same minimal, valid edits as a well-formed
// diff would.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extending far past the end of the 8-line document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits as in the well-formed merge-imports case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2341
2342fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2343 buffer: &Buffer,
2344 range: Range<T>,
2345) -> Vec<(String, Option<DiagnosticSeverity>)> {
2346 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2347 for chunk in buffer.snapshot().chunks(range, true) {
2348 if chunks.last().map_or(false, |prev_chunk| {
2349 prev_chunk.1 == chunk.diagnostic_severity
2350 }) {
2351 chunks.last_mut().unwrap().0.push_str(chunk.text);
2352 } else {
2353 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2354 }
2355 }
2356 chunks
2357}
2358
// End-to-end go-to-definition test: the fake language server points from
// `b.rs` into `a.rs`, which lies outside the project's visible worktree. The
// project must open `a.rs` in a new invisible worktree, and that worktree
// must be released once the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project's visible worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Respond with a location in `a.rs`, a file the project hasn't opened.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was opened in a new worktree that is not visible to the user.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for `a.rs`.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Lists each worktree's absolute root path and whether it is visible.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2453
// When completion items carry no explicit edit range, the range to replace
// must be inferred from the text around the cursor position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The completion item provides `insert_text` but no edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range replaces the 3-char partial word before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    // This item has neither `insert_text` nor an edit range; the label is used.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // Inside the string literal, the inferred range covers the trailing
    // 3-char path segment before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2545
2546#[gpui::test]
2547async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2548 init_test(cx);
2549
2550 let fs = FakeFs::new(cx.executor());
2551 fs.insert_tree(
2552 "/dir",
2553 json!({
2554 "a.ts": "",
2555 }),
2556 )
2557 .await;
2558
2559 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2560
2561 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2562 language_registry.add(typescript_lang());
2563 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2564 "TypeScript",
2565 FakeLspAdapter {
2566 capabilities: lsp::ServerCapabilities {
2567 completion_provider: Some(lsp::CompletionOptions {
2568 trigger_characters: Some(vec![":".to_string()]),
2569 ..Default::default()
2570 }),
2571 ..Default::default()
2572 },
2573 ..Default::default()
2574 },
2575 );
2576
2577 let buffer = project
2578 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2579 .await
2580 .unwrap();
2581
2582 let fake_server = fake_language_servers.next().await.unwrap();
2583
2584 let text = "let a = b.fqn";
2585 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2586 let completions = project.update(cx, |project, cx| {
2587 project.completions(&buffer, text.len(), cx)
2588 });
2589
2590 fake_server
2591 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2592 Ok(Some(lsp::CompletionResponse::Array(vec![
2593 lsp::CompletionItem {
2594 label: "fullyQualifiedName?".into(),
2595 insert_text: Some("fully\rQualified\r\nName".into()),
2596 ..Default::default()
2597 },
2598 ])))
2599 })
2600 .next()
2601 .await;
2602 let completions = completions.await.unwrap();
2603 assert_eq!(completions.len(), 1);
2604 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2605}
2606
// Exercises a code action that resolves to a command rather than edits:
// executing the command makes the server send `workspace/applyEdit` back to
// the client, and those edits must land in the returned project transaction
// so the whole action can be undone as a unit.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Ask the client to insert "X" at the start of `a.ts`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // Undoing the transaction reverts the command's edit too.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2739
2740#[gpui::test(iterations = 10)]
2741async fn test_save_file(cx: &mut gpui::TestAppContext) {
2742 init_test(cx);
2743
2744 let fs = FakeFs::new(cx.executor());
2745 fs.insert_tree(
2746 "/dir",
2747 json!({
2748 "file1": "the old contents",
2749 }),
2750 )
2751 .await;
2752
2753 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2754 let buffer = project
2755 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2756 .await
2757 .unwrap();
2758 buffer.update(cx, |buffer, cx| {
2759 assert_eq!(buffer.text(), "the old contents");
2760 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2761 });
2762
2763 project
2764 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2765 .await
2766 .unwrap();
2767
2768 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2769 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2770}
2771
2772#[gpui::test(iterations = 30)]
2773async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2774 init_test(cx);
2775
2776 let fs = FakeFs::new(cx.executor().clone());
2777 fs.insert_tree(
2778 "/dir",
2779 json!({
2780 "file1": "the original contents",
2781 }),
2782 )
2783 .await;
2784
2785 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2786 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2787 let buffer = project
2788 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2789 .await
2790 .unwrap();
2791
2792 // Simulate buffer diffs being slow, so that they don't complete before
2793 // the next file change occurs.
2794 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2795
2796 // Change the buffer's file on disk, and then wait for the file change
2797 // to be detected by the worktree, so that the buffer starts reloading.
2798 fs.save(
2799 "/dir/file1".as_ref(),
2800 &"the first contents".into(),
2801 Default::default(),
2802 )
2803 .await
2804 .unwrap();
2805 worktree.next_event(cx).await;
2806
2807 // Change the buffer's file again. Depending on the random seed, the
2808 // previous file change may still be in progress.
2809 fs.save(
2810 "/dir/file1".as_ref(),
2811 &"the second contents".into(),
2812 Default::default(),
2813 )
2814 .await
2815 .unwrap();
2816 worktree.next_event(cx).await;
2817
2818 cx.executor().run_until_parked();
2819 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2820 buffer.read_with(cx, |buffer, _| {
2821 assert_eq!(buffer.text(), on_disk_text);
2822 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2823 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2824 });
2825}
2826
2827#[gpui::test(iterations = 30)]
2828async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2829 init_test(cx);
2830
2831 let fs = FakeFs::new(cx.executor().clone());
2832 fs.insert_tree(
2833 "/dir",
2834 json!({
2835 "file1": "the original contents",
2836 }),
2837 )
2838 .await;
2839
2840 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2841 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2842 let buffer = project
2843 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2844 .await
2845 .unwrap();
2846
2847 // Simulate buffer diffs being slow, so that they don't complete before
2848 // the next file change occurs.
2849 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2850
2851 // Change the buffer's file on disk, and then wait for the file change
2852 // to be detected by the worktree, so that the buffer starts reloading.
2853 fs.save(
2854 "/dir/file1".as_ref(),
2855 &"the first contents".into(),
2856 Default::default(),
2857 )
2858 .await
2859 .unwrap();
2860 worktree.next_event(cx).await;
2861
2862 cx.executor()
2863 .spawn(cx.executor().simulate_random_delay())
2864 .await;
2865
2866 // Perform a noop edit, causing the buffer's version to increase.
2867 buffer.update(cx, |buffer, cx| {
2868 buffer.edit([(0..0, " ")], None, cx);
2869 buffer.undo(cx);
2870 });
2871
2872 cx.executor().run_until_parked();
2873 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2874 buffer.read_with(cx, |buffer, _| {
2875 let buffer_text = buffer.text();
2876 if buffer_text == on_disk_text {
2877 assert!(
2878 !buffer.is_dirty() && !buffer.has_conflict(),
2879 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2880 );
2881 }
2882 // If the file change occurred while the buffer was processing the first
2883 // change, the buffer will be in a conflicting state.
2884 else {
2885 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2886 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2887 }
2888 });
2889}
2890
2891#[gpui::test]
2892async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2893 init_test(cx);
2894
2895 let fs = FakeFs::new(cx.executor());
2896 fs.insert_tree(
2897 "/dir",
2898 json!({
2899 "file1": "the old contents",
2900 }),
2901 )
2902 .await;
2903
2904 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2905 let buffer = project
2906 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2907 .await
2908 .unwrap();
2909 buffer.update(cx, |buffer, cx| {
2910 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2911 });
2912
2913 project
2914 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2915 .await
2916 .unwrap();
2917
2918 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2919 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2920}
2921
2922#[gpui::test]
2923async fn test_save_as(cx: &mut gpui::TestAppContext) {
2924 init_test(cx);
2925
2926 let fs = FakeFs::new(cx.executor());
2927 fs.insert_tree("/dir", json!({})).await;
2928
2929 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2930
2931 let languages = project.update(cx, |project, _| project.languages().clone());
2932 languages.add(rust_lang());
2933
2934 let buffer = project.update(cx, |project, cx| {
2935 project.create_buffer("", None, cx).unwrap()
2936 });
2937 buffer.update(cx, |buffer, cx| {
2938 buffer.edit([(0..0, "abc")], None, cx);
2939 assert!(buffer.is_dirty());
2940 assert!(!buffer.has_conflict());
2941 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2942 });
2943 project
2944 .update(cx, |project, cx| {
2945 project.save_buffer_as(buffer.clone(), "/dir/file1.rs".into(), cx)
2946 })
2947 .await
2948 .unwrap();
2949 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2950
2951 cx.executor().run_until_parked();
2952 buffer.update(cx, |buffer, cx| {
2953 assert_eq!(
2954 buffer.file().unwrap().full_path(cx),
2955 Path::new("dir/file1.rs")
2956 );
2957 assert!(!buffer.is_dirty());
2958 assert!(!buffer.has_conflict());
2959 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2960 });
2961
2962 let opened_buffer = project
2963 .update(cx, |project, cx| {
2964 project.open_local_buffer("/dir/file1.rs", cx)
2965 })
2966 .await
2967 .unwrap();
2968 assert_eq!(opened_buffer, buffer);
2969}
2970
// Exercises worktree rescanning against the real filesystem: files are
// renamed and deleted while buffers are open, and both the local worktree
// and a remote replica (fed by the observed update stream) must converge on
// the new state, preserving entry ids and buffer paths across renames.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
    let rpc = project.update(cx, |p, _| p.client.clone());

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());

    let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());

    // Record every update the local worktree emits, so they can be replayed
    // on the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The worktree should reflect the renames: b/c moved to d, file5 gone.
    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids are stable across renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // Only the removed file's buffer should be marked as deleted.
        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3119
3120#[gpui::test(iterations = 10)]
3121async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3122 init_test(cx);
3123
3124 let fs = FakeFs::new(cx.executor());
3125 fs.insert_tree(
3126 "/dir",
3127 json!({
3128 "a": {
3129 "file1": "",
3130 }
3131 }),
3132 )
3133 .await;
3134
3135 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3136 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3137 let tree_id = tree.update(cx, |tree, _| tree.id());
3138
3139 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3140 project.update(cx, |project, cx| {
3141 let tree = project.worktrees().next().unwrap();
3142 tree.read(cx)
3143 .entry_for_path(path)
3144 .unwrap_or_else(|| panic!("no entry for path {}", path))
3145 .id
3146 })
3147 };
3148
3149 let dir_id = id_for_path("a", cx);
3150 let file_id = id_for_path("a/file1", cx);
3151 let buffer = project
3152 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3153 .await
3154 .unwrap();
3155 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3156
3157 project
3158 .update(cx, |project, cx| {
3159 project.rename_entry(dir_id, Path::new("b"), cx)
3160 })
3161 .unwrap()
3162 .await
3163 .unwrap();
3164 cx.executor().run_until_parked();
3165
3166 assert_eq!(id_for_path("b", cx), dir_id);
3167 assert_eq!(id_for_path("b/file1", cx), file_id);
3168 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3169}
3170
3171#[gpui::test]
3172async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3173 init_test(cx);
3174
3175 let fs = FakeFs::new(cx.executor());
3176 fs.insert_tree(
3177 "/dir",
3178 json!({
3179 "a.txt": "a-contents",
3180 "b.txt": "b-contents",
3181 }),
3182 )
3183 .await;
3184
3185 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3186
3187 // Spawn multiple tasks to open paths, repeating some paths.
3188 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3189 (
3190 p.open_local_buffer("/dir/a.txt", cx),
3191 p.open_local_buffer("/dir/b.txt", cx),
3192 p.open_local_buffer("/dir/a.txt", cx),
3193 )
3194 });
3195
3196 let buffer_a_1 = buffer_a_1.await.unwrap();
3197 let buffer_a_2 = buffer_a_2.await.unwrap();
3198 let buffer_b = buffer_b.await.unwrap();
3199 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3200 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3201
3202 // There is only one buffer per path.
3203 let buffer_a_id = buffer_a_1.entity_id();
3204 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3205
3206 // Open the same path again while it is still open.
3207 drop(buffer_a_1);
3208 let buffer_a_3 = project
3209 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3210 .await
3211 .unwrap();
3212
3213 // There's still only one buffer per path.
3214 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3215}
3216
// Verifies dirty-state tracking and the exact event sequences a buffer emits
// on edit, save, restore-to-saved-contents, and on-disk deletion.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all events except operations, which aren't relevant here.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first post-save edit emits DirtyChanged; the second
        // edit emits Edited alone because the buffer was already dirty.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Make the buffer dirty before deleting its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3357
// When the file changes on disk: a clean buffer is reloaded in place (with
// anchors adjusted according to the diff), while a modified buffer keeps its
// edits and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved along with the lines they were attached to.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3438
3439#[gpui::test]
3440async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3441 init_test(cx);
3442
3443 let fs = FakeFs::new(cx.executor());
3444 fs.insert_tree(
3445 "/dir",
3446 json!({
3447 "file1": "a\nb\nc\n",
3448 "file2": "one\r\ntwo\r\nthree\r\n",
3449 }),
3450 )
3451 .await;
3452
3453 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3454 let buffer1 = project
3455 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3456 .await
3457 .unwrap();
3458 let buffer2 = project
3459 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3460 .await
3461 .unwrap();
3462
3463 buffer1.update(cx, |buffer, _| {
3464 assert_eq!(buffer.text(), "a\nb\nc\n");
3465 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3466 });
3467 buffer2.update(cx, |buffer, _| {
3468 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3469 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3470 });
3471
3472 // Change a file's line endings on disk from unix to windows. The buffer's
3473 // state updates correctly.
3474 fs.save(
3475 "/dir/file1".as_ref(),
3476 &"aaa\nb\nc\n".into(),
3477 LineEnding::Windows,
3478 )
3479 .await
3480 .unwrap();
3481 cx.executor().run_until_parked();
3482 buffer1.update(cx, |buffer, _| {
3483 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3484 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3485 });
3486
3487 // Save a file with windows line endings. The file is written correctly.
3488 buffer2.update(cx, |buffer, cx| {
3489 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3490 });
3491 project
3492 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3493 .await
3494 .unwrap();
3495 assert_eq!(
3496 fs.load("/dir/file2".as_ref()).await.unwrap(),
3497 "one\r\ntwo\r\nthree\r\nfour\r\n",
3498 );
3499}
3500
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published with `relatedInformation` are
    // grouped: each primary diagnostic and its hints share a `group_id`,
    // with `is_primary` distinguishing the primary entry, and that
    // `diagnostic_group` returns each group's entries.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // A fake `textDocument/publishDiagnostics` payload containing two
    // logical groups:
    // - "error 1" (WARNING) with one related hint, and
    // - "error 2" (ERROR) with two related hints.
    // The hints are also published as standalone HINT diagnostics whose
    // `related_information` points back at the originating diagnostic,
    // mirroring how rust-analyzer emits related diagnostics.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the payload as if it came from language server 0.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries over the whole buffer: the standalone hint diagnostics
    // have been folded into their primaries' groups ("error 2" is group 0,
    // "error 1" is group 1), ordered by position in the buffer.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3742
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises the LSP rename flow end-to-end against a fake server:
    // `prepare_rename` resolves the renameable range, then `perform_rename`
    // applies a multi-file `WorkspaceEdit` returned by the server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server advertising rename support with
    // `prepareProvider`, so `prepare_rename` goes through the server.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Phase 1: prepare_rename at offset 7 (inside "ONE"). The fake server
    // reports the renameable range 6..9, which the project converts back
    // into buffer offsets.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Phase 2: perform_rename to "THREE". The fake server responds with a
    // WorkspaceEdit touching both one.rs (the definition) and two.rs (two
    // usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction maps each edited buffer to its transaction;
    // both files must reflect the rename.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3875
3876#[gpui::test]
3877async fn test_search(cx: &mut gpui::TestAppContext) {
3878 init_test(cx);
3879
3880 let fs = FakeFs::new(cx.executor());
3881 fs.insert_tree(
3882 "/dir",
3883 json!({
3884 "one.rs": "const ONE: usize = 1;",
3885 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3886 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3887 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3888 }),
3889 )
3890 .await;
3891 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3892 assert_eq!(
3893 search(
3894 &project,
3895 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3896 cx
3897 )
3898 .await
3899 .unwrap(),
3900 HashMap::from_iter([
3901 ("dir/two.rs".to_string(), vec![6..9]),
3902 ("dir/three.rs".to_string(), vec![37..40])
3903 ])
3904 );
3905
3906 let buffer_4 = project
3907 .update(cx, |project, cx| {
3908 project.open_local_buffer("/dir/four.rs", cx)
3909 })
3910 .await
3911 .unwrap();
3912 buffer_4.update(cx, |buffer, cx| {
3913 let text = "two::TWO";
3914 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3915 });
3916
3917 assert_eq!(
3918 search(
3919 &project,
3920 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3921 cx
3922 )
3923 .await
3924 .unwrap(),
3925 HashMap::from_iter([
3926 ("dir/two.rs".to_string(), vec![6..9]),
3927 ("dir/three.rs".to_string(), vec![37..40]),
3928 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3929 ])
3930 );
3931}
3932
3933#[gpui::test]
3934async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3935 init_test(cx);
3936
3937 let search_query = "file";
3938
3939 let fs = FakeFs::new(cx.executor());
3940 fs.insert_tree(
3941 "/dir",
3942 json!({
3943 "one.rs": r#"// Rust file one"#,
3944 "one.ts": r#"// TypeScript file one"#,
3945 "two.rs": r#"// Rust file two"#,
3946 "two.ts": r#"// TypeScript file two"#,
3947 }),
3948 )
3949 .await;
3950 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3951
3952 assert!(
3953 search(
3954 &project,
3955 SearchQuery::text(
3956 search_query,
3957 false,
3958 true,
3959 false,
3960 vec![PathMatcher::new("*.odd").unwrap()],
3961 Vec::new()
3962 )
3963 .unwrap(),
3964 cx
3965 )
3966 .await
3967 .unwrap()
3968 .is_empty(),
3969 "If no inclusions match, no files should be returned"
3970 );
3971
3972 assert_eq!(
3973 search(
3974 &project,
3975 SearchQuery::text(
3976 search_query,
3977 false,
3978 true,
3979 false,
3980 vec![PathMatcher::new("*.rs").unwrap()],
3981 Vec::new()
3982 )
3983 .unwrap(),
3984 cx
3985 )
3986 .await
3987 .unwrap(),
3988 HashMap::from_iter([
3989 ("dir/one.rs".to_string(), vec![8..12]),
3990 ("dir/two.rs".to_string(), vec![8..12]),
3991 ]),
3992 "Rust only search should give only Rust files"
3993 );
3994
3995 assert_eq!(
3996 search(
3997 &project,
3998 SearchQuery::text(
3999 search_query,
4000 false,
4001 true,
4002 false,
4003 vec![
4004 PathMatcher::new("*.ts").unwrap(),
4005 PathMatcher::new("*.odd").unwrap(),
4006 ],
4007 Vec::new()
4008 ).unwrap(),
4009 cx
4010 )
4011 .await
4012 .unwrap(),
4013 HashMap::from_iter([
4014 ("dir/one.ts".to_string(), vec![14..18]),
4015 ("dir/two.ts".to_string(), vec![14..18]),
4016 ]),
4017 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4018 );
4019
4020 assert_eq!(
4021 search(
4022 &project,
4023 SearchQuery::text(
4024 search_query,
4025 false,
4026 true,
4027 false,
4028 vec![
4029 PathMatcher::new("*.rs").unwrap(),
4030 PathMatcher::new("*.ts").unwrap(),
4031 PathMatcher::new("*.odd").unwrap(),
4032 ],
4033 Vec::new()
4034 ).unwrap(),
4035 cx
4036 )
4037 .await
4038 .unwrap(),
4039 HashMap::from_iter([
4040 ("dir/two.ts".to_string(), vec![14..18]),
4041 ("dir/one.rs".to_string(), vec![8..12]),
4042 ("dir/one.ts".to_string(), vec![14..18]),
4043 ("dir/two.rs".to_string(), vec![8..12]),
4044 ]),
4045 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4046 );
4047}
4048
4049#[gpui::test]
4050async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4051 init_test(cx);
4052
4053 let search_query = "file";
4054
4055 let fs = FakeFs::new(cx.executor());
4056 fs.insert_tree(
4057 "/dir",
4058 json!({
4059 "one.rs": r#"// Rust file one"#,
4060 "one.ts": r#"// TypeScript file one"#,
4061 "two.rs": r#"// Rust file two"#,
4062 "two.ts": r#"// TypeScript file two"#,
4063 }),
4064 )
4065 .await;
4066 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4067
4068 assert_eq!(
4069 search(
4070 &project,
4071 SearchQuery::text(
4072 search_query,
4073 false,
4074 true,
4075 false,
4076 Vec::new(),
4077 vec![PathMatcher::new("*.odd").unwrap()],
4078 )
4079 .unwrap(),
4080 cx
4081 )
4082 .await
4083 .unwrap(),
4084 HashMap::from_iter([
4085 ("dir/one.rs".to_string(), vec![8..12]),
4086 ("dir/one.ts".to_string(), vec![14..18]),
4087 ("dir/two.rs".to_string(), vec![8..12]),
4088 ("dir/two.ts".to_string(), vec![14..18]),
4089 ]),
4090 "If no exclusions match, all files should be returned"
4091 );
4092
4093 assert_eq!(
4094 search(
4095 &project,
4096 SearchQuery::text(
4097 search_query,
4098 false,
4099 true,
4100 false,
4101 Vec::new(),
4102 vec![PathMatcher::new("*.rs").unwrap()],
4103 )
4104 .unwrap(),
4105 cx
4106 )
4107 .await
4108 .unwrap(),
4109 HashMap::from_iter([
4110 ("dir/one.ts".to_string(), vec![14..18]),
4111 ("dir/two.ts".to_string(), vec![14..18]),
4112 ]),
4113 "Rust exclusion search should give only TypeScript files"
4114 );
4115
4116 assert_eq!(
4117 search(
4118 &project,
4119 SearchQuery::text(
4120 search_query,
4121 false,
4122 true,
4123 false,
4124 Vec::new(),
4125 vec![
4126 PathMatcher::new("*.ts").unwrap(),
4127 PathMatcher::new("*.odd").unwrap(),
4128 ],
4129 ).unwrap(),
4130 cx
4131 )
4132 .await
4133 .unwrap(),
4134 HashMap::from_iter([
4135 ("dir/one.rs".to_string(), vec![8..12]),
4136 ("dir/two.rs".to_string(), vec![8..12]),
4137 ]),
4138 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4139 );
4140
4141 assert!(
4142 search(
4143 &project,
4144 SearchQuery::text(
4145 search_query,
4146 false,
4147 true,
4148 false,
4149 Vec::new(),
4150 vec![
4151 PathMatcher::new("*.rs").unwrap(),
4152 PathMatcher::new("*.ts").unwrap(),
4153 PathMatcher::new("*.odd").unwrap(),
4154 ],
4155 ).unwrap(),
4156 cx
4157 )
4158 .await
4159 .unwrap().is_empty(),
4160 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4161 );
4162}
4163
4164#[gpui::test]
4165async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4166 init_test(cx);
4167
4168 let search_query = "file";
4169
4170 let fs = FakeFs::new(cx.executor());
4171 fs.insert_tree(
4172 "/dir",
4173 json!({
4174 "one.rs": r#"// Rust file one"#,
4175 "one.ts": r#"// TypeScript file one"#,
4176 "two.rs": r#"// Rust file two"#,
4177 "two.ts": r#"// TypeScript file two"#,
4178 }),
4179 )
4180 .await;
4181 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4182
4183 assert!(
4184 search(
4185 &project,
4186 SearchQuery::text(
4187 search_query,
4188 false,
4189 true,
4190 false,
4191 vec![PathMatcher::new("*.odd").unwrap()],
4192 vec![PathMatcher::new("*.odd").unwrap()],
4193 )
4194 .unwrap(),
4195 cx
4196 )
4197 .await
4198 .unwrap()
4199 .is_empty(),
4200 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4201 );
4202
4203 assert!(
4204 search(
4205 &project,
4206 SearchQuery::text(
4207 search_query,
4208 false,
4209 true,
4210 false,
4211 vec![PathMatcher::new("*.ts").unwrap()],
4212 vec![PathMatcher::new("*.ts").unwrap()],
4213 ).unwrap(),
4214 cx
4215 )
4216 .await
4217 .unwrap()
4218 .is_empty(),
4219 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4220 );
4221
4222 assert!(
4223 search(
4224 &project,
4225 SearchQuery::text(
4226 search_query,
4227 false,
4228 true,
4229 false,
4230 vec![
4231 PathMatcher::new("*.ts").unwrap(),
4232 PathMatcher::new("*.odd").unwrap()
4233 ],
4234 vec![
4235 PathMatcher::new("*.ts").unwrap(),
4236 PathMatcher::new("*.odd").unwrap()
4237 ],
4238 )
4239 .unwrap(),
4240 cx
4241 )
4242 .await
4243 .unwrap()
4244 .is_empty(),
4245 "Non-matching inclusions and exclusions should not change that."
4246 );
4247
4248 assert_eq!(
4249 search(
4250 &project,
4251 SearchQuery::text(
4252 search_query,
4253 false,
4254 true,
4255 false,
4256 vec![
4257 PathMatcher::new("*.ts").unwrap(),
4258 PathMatcher::new("*.odd").unwrap()
4259 ],
4260 vec![
4261 PathMatcher::new("*.rs").unwrap(),
4262 PathMatcher::new("*.odd").unwrap()
4263 ],
4264 )
4265 .unwrap(),
4266 cx
4267 )
4268 .await
4269 .unwrap(),
4270 HashMap::from_iter([
4271 ("dir/one.ts".to_string(), vec![14..18]),
4272 ("dir/two.ts".to_string(), vec![14..18]),
4273 ]),
4274 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4275 );
4276}
4277
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that inclusion globs are matched against worktree-relative
    // paths (prefixed by the worktree name), so an inclusion can target a
    // single worktree or span all of them.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // A single project containing both worktrees.
    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // Inclusion anchored to worktree-a only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Inclusion anchored to worktree-b only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A glob without a worktree prefix matches files in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4369
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the `include_ignored` flag of `SearchQuery::text` (the fourth
    // argument): when false, gitignored directories are skipped; when true,
    // they are searched, and inclusion/exclusion globs still apply to them.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // include_ignored == false: target/ and node_modules/ are skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project for each query — presumably to avoid state carried
    // over from the previous search influencing the result; verify against
    // `search` internals if this ever changes.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    // include_ignored == true: every file is searched, ignored or not.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // include_ignored == true combined with inclusion/exclusion globs:
    // only non-TS files under the ignored prettier directory remain.
    let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
    let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4467
#[test]
fn test_glob_literal_prefix() {
    // The literal prefix is the leading portion of a glob pattern that
    // contains no glob metacharacters; a fully literal path is returned
    // unchanged.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected) in cases {
        assert_eq!(glob_literal_prefix(glob), expected, "prefix of {glob:?}");
    }
}
4475
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies path validation in `Project::create_entry` / `open_buffer`:
    // odd-but-legal names like "b.." are allowed, while any path that
    // escapes the worktree or contains a ".." component is rejected.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The project root is /one/two/three; /one/two/c.rs lies outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a valid file name — trailing dots do not make it a parent
    // traversal — so creation succeeds.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // The filesystem contains only the tree above plus the created "b.." —
    // neither rejected path produced an entry.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4544
4545#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One tsx file; four fake language servers will all attach to it below.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Primary adapter (registered with `true`): advertises hover support.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Secondary adapter with hover support: should be queried and contribute a response.
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Secondary adapter with hover support that will answer `None`:
    // queried, but contributes nothing to the final result.
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Adapter with NO hover capability: must never receive a hover request.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer triggers startup of all registered servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect each server as it initializes and install a per-server hover
    // handler; panic if a server fails to start or starts twice.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            // These two answer with a hover labeled by the server's name,
            // so the final assertion can tell the responses apart.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // ESLint is queried (it advertises hover) but returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This server lacks the hover capability; receiving a request
            // at all would be a bug, hence the panicking handler. Note it is
            // deliberately NOT added to `servers_with_hover_requests`.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Fire one hover query, then wait until every hover-capable server has
    // actually been asked before inspecting the aggregated result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4698
4699#[gpui::test]
4700async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4701 init_test(cx);
4702
4703 let fs = FakeFs::new(cx.executor());
4704 fs.insert_tree(
4705 "/dir",
4706 json!({
4707 "a.ts": "a",
4708 }),
4709 )
4710 .await;
4711
4712 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4713
4714 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4715 language_registry.add(typescript_lang());
4716 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4717 "TypeScript",
4718 FakeLspAdapter {
4719 capabilities: lsp::ServerCapabilities {
4720 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4721 ..lsp::ServerCapabilities::default()
4722 },
4723 ..FakeLspAdapter::default()
4724 },
4725 );
4726
4727 let buffer = project
4728 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4729 .await
4730 .unwrap();
4731 cx.executor().run_until_parked();
4732
4733 let fake_server = fake_language_servers
4734 .next()
4735 .await
4736 .expect("failed to get the language server");
4737
4738 let mut request_handled =
4739 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4740 Ok(Some(lsp::Hover {
4741 contents: lsp::HoverContents::Array(vec![
4742 lsp::MarkedString::String("".to_string()),
4743 lsp::MarkedString::String(" ".to_string()),
4744 lsp::MarkedString::String("\n\n\n".to_string()),
4745 ]),
4746 range: None,
4747 }))
4748 });
4749
4750 let hover_task = project.update(cx, |project, cx| {
4751 project.hover(&buffer, Point::new(0, 0), cx)
4752 });
4753 let () = request_handled
4754 .next()
4755 .await
4756 .expect("All hover requests should have been triggered");
4757 assert_eq!(
4758 Vec::<String>::new(),
4759 hover_task
4760 .await
4761 .into_iter()
4762 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4763 .sorted()
4764 .collect::<Vec<_>>(),
4765 "Empty hover parts should be ignored"
4766 );
4767}
4768
4769#[gpui::test]
4770async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4771 init_test(cx);
4772
4773 let fs = FakeFs::new(cx.executor());
4774 fs.insert_tree(
4775 "/dir",
4776 json!({
4777 "a.tsx": "a",
4778 }),
4779 )
4780 .await;
4781
4782 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4783
4784 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4785 language_registry.add(tsx_lang());
4786 let language_server_names = [
4787 "TypeScriptServer",
4788 "TailwindServer",
4789 "ESLintServer",
4790 "NoActionsCapabilitiesServer",
4791 ];
4792 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4793 "tsx",
4794 true,
4795 FakeLspAdapter {
4796 name: &language_server_names[0],
4797 capabilities: lsp::ServerCapabilities {
4798 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4799 ..lsp::ServerCapabilities::default()
4800 },
4801 ..FakeLspAdapter::default()
4802 },
4803 );
4804 let _a = language_registry.register_specific_fake_lsp_adapter(
4805 "tsx",
4806 false,
4807 FakeLspAdapter {
4808 name: &language_server_names[1],
4809 capabilities: lsp::ServerCapabilities {
4810 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4811 ..lsp::ServerCapabilities::default()
4812 },
4813 ..FakeLspAdapter::default()
4814 },
4815 );
4816 let _b = language_registry.register_specific_fake_lsp_adapter(
4817 "tsx",
4818 false,
4819 FakeLspAdapter {
4820 name: &language_server_names[2],
4821 capabilities: lsp::ServerCapabilities {
4822 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4823 ..lsp::ServerCapabilities::default()
4824 },
4825 ..FakeLspAdapter::default()
4826 },
4827 );
4828 let _c = language_registry.register_specific_fake_lsp_adapter(
4829 "tsx",
4830 false,
4831 FakeLspAdapter {
4832 name: &language_server_names[3],
4833 capabilities: lsp::ServerCapabilities {
4834 code_action_provider: None,
4835 ..lsp::ServerCapabilities::default()
4836 },
4837 ..FakeLspAdapter::default()
4838 },
4839 );
4840
4841 let buffer = project
4842 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4843 .await
4844 .unwrap();
4845 cx.executor().run_until_parked();
4846
4847 let mut servers_with_actions_requests = HashMap::default();
4848 for i in 0..language_server_names.len() {
4849 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4850 panic!(
4851 "Failed to get language server #{i} with name {}",
4852 &language_server_names[i]
4853 )
4854 });
4855 let new_server_name = new_server.server.name();
4856 assert!(
4857 !servers_with_actions_requests.contains_key(new_server_name),
4858 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4859 );
4860 let new_server_name = new_server_name.to_string();
4861 match new_server_name.as_str() {
4862 "TailwindServer" | "TypeScriptServer" => {
4863 servers_with_actions_requests.insert(
4864 new_server_name.clone(),
4865 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4866 move |_, _| {
4867 let name = new_server_name.clone();
4868 async move {
4869 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4870 lsp::CodeAction {
4871 title: format!("{name} code action"),
4872 ..lsp::CodeAction::default()
4873 },
4874 )]))
4875 }
4876 },
4877 ),
4878 );
4879 }
4880 "ESLintServer" => {
4881 servers_with_actions_requests.insert(
4882 new_server_name,
4883 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4884 |_, _| async move { Ok(None) },
4885 ),
4886 );
4887 }
4888 "NoActionsCapabilitiesServer" => {
4889 let _never_handled = new_server
4890 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4891 panic!(
4892 "Should not call for code actions server with no corresponding capabilities"
4893 )
4894 });
4895 }
4896 unexpected => panic!("Unexpected server name: {unexpected}"),
4897 }
4898 }
4899
4900 let code_actions_task = project.update(cx, |project, cx| {
4901 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4902 });
4903 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4904 |mut code_actions_request| async move {
4905 code_actions_request
4906 .next()
4907 .await
4908 .expect("All code actions requests should have been triggered")
4909 },
4910 ))
4911 .await;
4912 assert_eq!(
4913 vec!["TailwindServer code action", "TypeScriptServer code action"],
4914 code_actions_task
4915 .await
4916 .into_iter()
4917 .map(|code_action| code_action.lsp_action.title)
4918 .sorted()
4919 .collect::<Vec<_>>(),
4920 "Should receive code actions responses from all related servers with hover capabilities"
4921 );
4922}
4923
4924async fn search(
4925 project: &Model<Project>,
4926 query: SearchQuery,
4927 cx: &mut gpui::TestAppContext,
4928) -> Result<HashMap<String, Vec<Range<usize>>>> {
4929 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4930 let mut results = HashMap::default();
4931 while let Some(search_result) = search_rx.next().await {
4932 match search_result {
4933 SearchResult::Buffer { buffer, ranges } => {
4934 results.entry(buffer).or_insert(ranges);
4935 }
4936 SearchResult::LimitReached => {}
4937 }
4938 }
4939 Ok(results
4940 .into_iter()
4941 .map(|(buffer, ranges)| {
4942 buffer.update(cx, |buffer, cx| {
4943 let path = buffer
4944 .file()
4945 .unwrap()
4946 .full_path(cx)
4947 .to_string_lossy()
4948 .to_string();
4949 let ranges = ranges
4950 .into_iter()
4951 .map(|range| range.to_offset(buffer))
4952 .collect::<Vec<_>>();
4953 (path, ranges)
4954 })
4955 })
4956 .collect())
4957}
4958
4959fn init_test(cx: &mut gpui::TestAppContext) {
4960 if std::env::var("RUST_LOG").is_ok() {
4961 env_logger::try_init().ok();
4962 }
4963
4964 cx.update(|cx| {
4965 let settings_store = SettingsStore::test(cx);
4966 cx.set_global(settings_store);
4967 release_channel::init("0.0.0", cx);
4968 language::init(cx);
4969 Project::init_settings(cx);
4970 });
4971}
4972
4973fn json_lang() -> Arc<Language> {
4974 Arc::new(Language::new(
4975 LanguageConfig {
4976 name: "JSON".into(),
4977 matcher: LanguageMatcher {
4978 path_suffixes: vec!["json".to_string()],
4979 ..Default::default()
4980 },
4981 ..Default::default()
4982 },
4983 None,
4984 ))
4985}
4986
4987fn js_lang() -> Arc<Language> {
4988 Arc::new(Language::new(
4989 LanguageConfig {
4990 name: Arc::from("JavaScript"),
4991 matcher: LanguageMatcher {
4992 path_suffixes: vec!["js".to_string()],
4993 ..Default::default()
4994 },
4995 ..Default::default()
4996 },
4997 None,
4998 ))
4999}
5000
5001fn rust_lang() -> Arc<Language> {
5002 Arc::new(Language::new(
5003 LanguageConfig {
5004 name: "Rust".into(),
5005 matcher: LanguageMatcher {
5006 path_suffixes: vec!["rs".to_string()],
5007 ..Default::default()
5008 },
5009 ..Default::default()
5010 },
5011 Some(tree_sitter_rust::language()),
5012 ))
5013}
5014
5015fn typescript_lang() -> Arc<Language> {
5016 Arc::new(Language::new(
5017 LanguageConfig {
5018 name: "TypeScript".into(),
5019 matcher: LanguageMatcher {
5020 path_suffixes: vec!["ts".to_string()],
5021 ..Default::default()
5022 },
5023 ..Default::default()
5024 },
5025 Some(tree_sitter_typescript::language_typescript()),
5026 ))
5027}
5028
5029fn tsx_lang() -> Arc<Language> {
5030 Arc::new(Language::new(
5031 LanguageConfig {
5032 name: "tsx".into(),
5033 matcher: LanguageMatcher {
5034 path_suffixes: vec!["tsx".to_string()],
5035 ..Default::default()
5036 },
5037 ..Default::default()
5038 },
5039 Some(tree_sitter_typescript::language_tsx()),
5040 ))
5041}