1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{TaskContext, TaskSource, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20use worktree::WorktreeModelHandle as _;
21
22#[gpui::test]
23async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
24 cx.executor().allow_parking();
25
26 let (tx, mut rx) = futures::channel::mpsc::unbounded();
27 let _thread = std::thread::spawn(move || {
28 std::fs::metadata("/Users").unwrap();
29 std::thread::sleep(Duration::from_millis(1000));
30 tx.unbounded_send(1).unwrap();
31 });
32 rx.next().await.unwrap();
33}
34
35#[gpui::test]
36async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let io_task = smol::unblock(move || {
40 println!("sleeping on thread {:?}", std::thread::current().id());
41 std::thread::sleep(Duration::from_millis(10));
42 1
43 });
44
45 let task = cx.foreground_executor().spawn(async move {
46 io_task.await;
47 });
48
49 task.await;
50}
51
52#[cfg(not(windows))]
53#[gpui::test]
54async fn test_symlinks(cx: &mut gpui::TestAppContext) {
55 init_test(cx);
56 cx.executor().allow_parking();
57
58 let dir = temp_tree(json!({
59 "root": {
60 "apple": "",
61 "banana": {
62 "carrot": {
63 "date": "",
64 "endive": "",
65 }
66 },
67 "fennel": {
68 "grape": "",
69 }
70 }
71 }));
72
73 let root_link_path = dir.path().join("root_link");
74 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
75 os::unix::fs::symlink(
76 &dir.path().join("root/fennel"),
77 &dir.path().join("root/finnochio"),
78 )
79 .unwrap();
80
81 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
82
83 project.update(cx, |project, cx| {
84 let tree = project.worktrees().next().unwrap().read(cx);
85 assert_eq!(tree.file_count(), 5);
86 assert_eq!(
87 tree.inode_for_path("fennel/grape"),
88 tree.inode_for_path("finnochio/grape")
89 );
90 });
91}
92
93#[gpui::test]
94async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
95 init_test(cx);
96
97 let fs = FakeFs::new(cx.executor());
98 fs.insert_tree(
99 "/the-root",
100 json!({
101 ".zed": {
102 "settings.json": r#"{ "tab_size": 8 }"#,
103 "tasks.json": r#"[{
104 "label": "cargo check",
105 "command": "cargo",
106 "args": ["check", "--all"]
107 },]"#,
108 },
109 "a": {
110 "a.rs": "fn a() {\n A\n}"
111 },
112 "b": {
113 ".zed": {
114 "settings.json": r#"{ "tab_size": 2 }"#,
115 "tasks.json": r#"[{
116 "label": "cargo check",
117 "command": "cargo",
118 "args": ["check"]
119 },]"#,
120 },
121 "b.rs": "fn b() {\n B\n}"
122 }
123 }),
124 )
125 .await;
126
127 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
128 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
129 let task_context = TaskContext::default();
130
131 cx.executor().run_until_parked();
132 let workree_id = cx.update(|cx| {
133 project.update(cx, |project, cx| {
134 project.worktrees().next().unwrap().read(cx).id()
135 })
136 });
137 let global_task_source_kind = TaskSourceKind::Worktree {
138 id: workree_id,
139 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
140 id_base: "local_tasks_for_worktree",
141 };
142 cx.update(|cx| {
143 let tree = worktree.read(cx);
144
145 let settings_a = language_settings(
146 None,
147 Some(
148 &(File::for_entry(
149 tree.entry_for_path("a/a.rs").unwrap().clone(),
150 worktree.clone(),
151 ) as _),
152 ),
153 cx,
154 );
155 let settings_b = language_settings(
156 None,
157 Some(
158 &(File::for_entry(
159 tree.entry_for_path("b/b.rs").unwrap().clone(),
160 worktree.clone(),
161 ) as _),
162 ),
163 cx,
164 );
165
166 assert_eq!(settings_a.tab_size.get(), 8);
167 assert_eq!(settings_b.tab_size.get(), 2);
168
169 let all_tasks = project
170 .update(cx, |project, cx| {
171 project.task_inventory().update(cx, |inventory, cx| {
172 let (mut old, new) = inventory.used_and_current_resolved_tasks(
173 None,
174 Some(workree_id),
175 &task_context,
176 cx,
177 );
178 old.extend(new);
179 old
180 })
181 })
182 .into_iter()
183 .map(|(source_kind, task)| {
184 let resolved = task.resolved.unwrap();
185 (
186 source_kind,
187 task.resolved_label,
188 resolved.args,
189 resolved.env,
190 )
191 })
192 .collect::<Vec<_>>();
193 assert_eq!(
194 all_tasks,
195 vec![
196 (
197 TaskSourceKind::Worktree {
198 id: workree_id,
199 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
200 id_base: "local_tasks_for_worktree",
201 },
202 "cargo check".to_string(),
203 vec!["check".to_string()],
204 HashMap::default(),
205 ),
206 (
207 global_task_source_kind.clone(),
208 "cargo check".to_string(),
209 vec!["check".to_string(), "--all".to_string()],
210 HashMap::default(),
211 ),
212 ]
213 );
214 });
215
216 project.update(cx, |project, cx| {
217 let inventory = project.task_inventory();
218 inventory.update(cx, |inventory, cx| {
219 let (mut old, new) = inventory.used_and_current_resolved_tasks(
220 None,
221 Some(workree_id),
222 &task_context,
223 cx,
224 );
225 old.extend(new);
226 let (_, resolved_task) = old
227 .into_iter()
228 .find(|(source_kind, _)| source_kind == &global_task_source_kind)
229 .expect("should have one global task");
230 inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
231 })
232 });
233
234 cx.update(|cx| {
235 let all_tasks = project
236 .update(cx, |project, cx| {
237 project.task_inventory().update(cx, |inventory, cx| {
238 inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
239 inventory.add_source(
240 global_task_source_kind.clone(),
241 |cx| {
242 cx.new_model(|_| {
243 let source = TestTaskSource {
244 tasks: TaskTemplates(vec![TaskTemplate {
245 label: "cargo check".to_string(),
246 command: "cargo".to_string(),
247 args: vec![
248 "check".to_string(),
249 "--all".to_string(),
250 "--all-targets".to_string(),
251 ],
252 env: HashMap::from_iter(Some((
253 "RUSTFLAGS".to_string(),
254 "-Zunstable-options".to_string(),
255 ))),
256 ..TaskTemplate::default()
257 }]),
258 };
259 Box::new(source) as Box<_>
260 })
261 },
262 cx,
263 );
264 let (mut old, new) = inventory.used_and_current_resolved_tasks(
265 None,
266 Some(workree_id),
267 &task_context,
268 cx,
269 );
270 old.extend(new);
271 old
272 })
273 })
274 .into_iter()
275 .map(|(source_kind, task)| {
276 let resolved = task.resolved.unwrap();
277 (
278 source_kind,
279 task.resolved_label,
280 resolved.args,
281 resolved.env,
282 )
283 })
284 .collect::<Vec<_>>();
285 assert_eq!(
286 all_tasks,
287 vec![
288 (
289 TaskSourceKind::Worktree {
290 id: workree_id,
291 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
292 id_base: "local_tasks_for_worktree",
293 },
294 "cargo check".to_string(),
295 vec!["check".to_string()],
296 HashMap::default(),
297 ),
298 (
299 TaskSourceKind::Worktree {
300 id: workree_id,
301 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
302 id_base: "local_tasks_for_worktree",
303 },
304 "cargo check".to_string(),
305 vec![
306 "check".to_string(),
307 "--all".to_string(),
308 "--all-targets".to_string()
309 ],
310 HashMap::from_iter(Some((
311 "RUSTFLAGS".to_string(),
312 "-Zunstable-options".to_string()
313 ))),
314 ),
315 ]
316 );
317 });
318}
319
/// A stub task source used by tests to inject a fixed set of task templates
/// into the project's task inventory.
struct TestTaskSource {
    // Templates returned verbatim by `tasks_to_schedule`.
    tasks: TaskTemplates,
}
323
impl TaskSource for TestTaskSource {
    // Required for downcasting by the inventory; returns the concrete type.
    fn as_any(&mut self) -> &mut dyn std::any::Any {
        self
    }

    // Hands out a clone of the fixed template list, ignoring the context.
    fn tasks_to_schedule(&mut self, _: &mut ModelContext<Box<dyn TaskSource>>) -> TaskTemplates {
        self.tasks.clone()
    }
}
333
/// End-to-end test of language-server lifecycle management: servers start
/// lazily per language, receive open/change/save/close notifications only for
/// buffers of their language, follow files across renames (including renames
/// that change the file's language), and get restarted on demand with all
/// their documents reopened.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake LSP adapters; each `fake_*_servers` stream yields a
    // handle every time a server of that kind is started.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Seed the renamed buffer with a diagnostic so we can verify below that
    // diagnostics are dropped when the buffer changes language.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
704
/// Verifies `workspace/didChangeWatchedFiles` support: gitignored directories
/// are not scanned until a language server registers a watcher that covers
/// them, and subsequent FS mutations are reported to the server only when
/// they match one of its registered glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Accumulate watched-file events, sorted by URI for stable assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // Watching `target/y/**` forces scanning of that ignored subtree.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
898
/// Verifies that when a project consists of two single-file worktrees,
/// diagnostics published for each file URI land in the corresponding buffer
/// and are rendered with the correct severity.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each path becomes its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file: an error in a.rs, a warning in b.rs.
    project.update(cx, |project, cx| {
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        project
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows exactly its own diagnostic over the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
990
991#[gpui::test]
992async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
993 init_test(cx);
994
995 let fs = FakeFs::new(cx.executor());
996 fs.insert_tree(
997 "/root",
998 json!({
999 "dir": {
1000 ".git": {
1001 "HEAD": "ref: refs/heads/main",
1002 },
1003 ".gitignore": "b.rs",
1004 "a.rs": "let a = 1;",
1005 "b.rs": "let b = 2;",
1006 },
1007 "other.rs": "let b = c;"
1008 }),
1009 )
1010 .await;
1011
1012 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1013 let (worktree, _) = project
1014 .update(cx, |project, cx| {
1015 project.find_or_create_local_worktree("/root/dir", true, cx)
1016 })
1017 .await
1018 .unwrap();
1019 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1020
1021 let (worktree, _) = project
1022 .update(cx, |project, cx| {
1023 project.find_or_create_local_worktree("/root/other.rs", false, cx)
1024 })
1025 .await
1026 .unwrap();
1027 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1028
1029 let server_id = LanguageServerId(0);
1030 project.update(cx, |project, cx| {
1031 project
1032 .update_diagnostics(
1033 server_id,
1034 lsp::PublishDiagnosticsParams {
1035 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1036 version: None,
1037 diagnostics: vec![lsp::Diagnostic {
1038 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1039 severity: Some(lsp::DiagnosticSeverity::ERROR),
1040 message: "unused variable 'b'".to_string(),
1041 ..Default::default()
1042 }],
1043 },
1044 &[],
1045 cx,
1046 )
1047 .unwrap();
1048 project
1049 .update_diagnostics(
1050 server_id,
1051 lsp::PublishDiagnosticsParams {
1052 uri: Url::from_file_path("/root/other.rs").unwrap(),
1053 version: None,
1054 diagnostics: vec![lsp::Diagnostic {
1055 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1056 severity: Some(lsp::DiagnosticSeverity::ERROR),
1057 message: "unknown variable 'c'".to_string(),
1058 ..Default::default()
1059 }],
1060 },
1061 &[],
1062 cx,
1063 )
1064 .unwrap();
1065 });
1066
1067 let main_ignored_buffer = project
1068 .update(cx, |project, cx| {
1069 project.open_buffer((main_worktree_id, "b.rs"), cx)
1070 })
1071 .await
1072 .unwrap();
1073 main_ignored_buffer.update(cx, |buffer, _| {
1074 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1075 assert_eq!(
1076 chunks
1077 .iter()
1078 .map(|(s, d)| (s.as_str(), *d))
1079 .collect::<Vec<_>>(),
1080 &[
1081 ("let ", None),
1082 ("b", Some(DiagnosticSeverity::ERROR)),
1083 (" = 2;", None),
1084 ],
1085 "Gigitnored buffers should still get in-buffer diagnostics",
1086 );
1087 });
1088 let other_buffer = project
1089 .update(cx, |project, cx| {
1090 project.open_buffer((other_worktree_id, ""), cx)
1091 })
1092 .await
1093 .unwrap();
1094 other_buffer.update(cx, |buffer, _| {
1095 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1096 assert_eq!(
1097 chunks
1098 .iter()
1099 .map(|(s, d)| (s.as_str(), *d))
1100 .collect::<Vec<_>>(),
1101 &[
1102 ("let b = ", None),
1103 ("c", Some(DiagnosticSeverity::ERROR)),
1104 (";", None),
1105 ],
1106 "Buffers from hidden projects should still get in-buffer diagnostics"
1107 );
1108 });
1109
1110 project.update(cx, |project, cx| {
1111 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1112 assert_eq!(
1113 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1114 vec![(
1115 ProjectPath {
1116 worktree_id: main_worktree_id,
1117 path: Arc::from(Path::new("b.rs")),
1118 },
1119 server_id,
1120 DiagnosticSummary {
1121 error_count: 1,
1122 warning_count: 0,
1123 }
1124 )]
1125 );
1126 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1127 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1128 });
1129}
1130
// Verifies the disk-based-diagnostics lifecycle driven by the adapter's
// progress token: starting progress emits `DiskBasedDiagnosticsStarted`,
// publishing diagnostics emits `DiagnosticsUpdated`, ending progress emits
// `DiskBasedDiagnosticsFinished`, published diagnostics land in the buffer,
// and re-publishing identical (empty) diagnostics yields no redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The fake adapter declares `progress_token` as its disk-based diagnostics
    // token, so progress notifications using it are treated as disk-based
    // diagnostic runs.
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Subscribe to project events; the first one observed should be the
    // server-added event for the server started above.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // The token is matched by prefix — "{token}/0" still counts as the
    // disk-based diagnostics token.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish one error diagnostic for a.rs (a file that is not yet open).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening a.rs now should surface the previously published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical empty publish: after the executor settles, no further
    // event should be pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1259
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not leave the project stuck in the "diagnostics
// running" state: the replacement server (id 1) owns the progress lifecycle,
// and ending *its* progress finishes diagnostics even though the old server's
// progress token was never ended.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    // Opening the buffer starts the first language server (id 0).
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // An empty, correctly-typed slice of `LanguageServerId`.
            [LanguageServerId(0); 0]
        );
    });
}
1338
// Verifies that restarting a language server clears the diagnostics it had
// published: both the in-buffer diagnostic entries and the project-level
// diagnostic summary drop back to empty after the restart.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Let the notification propagate, then confirm the diagnostic is visible
    // in the buffer and counted in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1418
// Verifies that a `publishDiagnostics` notification carrying an unknown buffer
// version (e.g. stale state from before a restart) does not break version
// tracking: after restarting, the new server re-opens the buffer at version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The replacement server should receive a fresh `didOpen` whose version
    // starts over at 0, unaffected by the bogus version reported above.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1457
// Verifies that the per-language `enable_language_server` setting stops and
// starts only the matching server: disabling Rust exits the Rust server while
// the JavaScript server keeps running; re-enabling Rust starts a fresh Rust
// server, and disabling JavaScript exits the JavaScript server.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server is a new instance that re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1571
// Exercises translation of published diagnostics through buffer edits:
// diagnostics reported against an older document version are mapped forward
// through subsequent edits, overlapping diagnostics chunk the text correctly,
// and publishes whose diagnostic order doesn't match buffer order are handled.
// Note that `group_id` keeps incrementing across successive publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics originally on rows 1-2 now appear on rows 3-4 because of
        // the two newlines inserted at the top of the buffer.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips the chunks at the range boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider (warning) entry is returned before the nested (error) one.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe (error) wins;
        // the trailing portion covered only by the warning shows the warning.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by position even though they were published
        // out of order, with positions adjusted for the latest edits.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1851
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened so there is always at least one character of highlighted text (see
// the comment above the final assertion for the exact rules).
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two empty-range diagnostics directly (no language server needed):
    // one mid-line (row 0, col 10) and one at end-of-line (row 1, col 10).
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1920
// Verifies that diagnostics reported by two different language servers for the
// same file are tracked independently: each server contributes its own error
// to the project-wide summary rather than one overwriting the other.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        // Same path, same range — but attributed to server 0.
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // …and an overlapping diagnostic attributed to server 1.
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both errors are counted, one per server.
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
1976
// Verifies that `edits_from_lsp` interprets edits against the document version
// the server computed them for: local edits made after that version are taken
// into account, so applying the returned edits produces the result the server
// intended without clobbering the user's newer changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server saw at open time; the LSP edits below
    // will be tagged with this (now-outdated) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                // Interpret the edits relative to the open-time snapshot.
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the comments inserted after the
    // server's snapshot while still performing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2129
// Verifies that `edits_from_lsp` minimizes a large diff-style edit set (as
// rust-analyzer produces for merge-imports) down to the actual small changes,
// rather than replacing and reinserting most of the file.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four diff-style edits collapse to just two minimal edits: the
        // import rewrite and the removal of the now-duplicated `use a::c;`.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2237
// Verifies that `edits_from_lsp` tolerates malformed server edits: unordered
// edits, an inverted range (end before start), and a range whose end row lies
// far past the end of the buffer. The result is still the same minimal,
// well-formed pair of edits as in the well-behaved adjacent-lines test.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End row 99 is far beyond the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the same two minimal edits come out.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2341
2342fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2343 buffer: &Buffer,
2344 range: Range<T>,
2345) -> Vec<(String, Option<DiagnosticSeverity>)> {
2346 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2347 for chunk in buffer.snapshot().chunks(range, true) {
2348 if chunks.last().map_or(false, |prev_chunk| {
2349 prev_chunk.1 == chunk.diagnostic_severity
2350 }) {
2351 chunks.last_mut().unwrap().0.push_str(chunk.text);
2352 } else {
2353 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2354 }
2355 }
2356 chunks
2357}
2358
// Exercises go-to-definition across files: jumping from `b.rs` to a symbol
// defined in `a.rs`, which lies outside the project's single visible worktree
// (the project is rooted at `/dir/b.rs`). The target file is loaded into a
// new, invisible worktree that is released once the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Note: only `b.rs` is part of the project; `a.rs` is not.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Respond to the definition request with a location inside `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        // The definition's target buffer belongs to `a.rs`...
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // ...and `a.rs` was added as an invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: all of the project's worktrees as (absolute path, is_visible) pairs.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2453
// Completion items that carry no explicit edit range: the replaced range must
// be inferred from the text preceding the cursor, and `insert_text` (when
// present) takes precedence over `label` as the inserted text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: the item provides `insert_text` but no edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins over the label, and the replaced range covers the
    // partial word "fqn" just before the cursor.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: the item has only a label, and the cursor sits inside a string
    // literal, just before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // The label is used as the new text, replacing "cmp" before the cursor;
    // the closing quote is left untouched.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2545
// Carriage returns (`\r` and `\r\n`) in a completion's `insert_text` must be
// normalized to plain `\n` in the resulting completion's `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // Respond with an `insert_text` containing both a bare `\r` and a `\r\n`.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" were replaced with "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2606
// A code action that resolves to a command rather than edits: applying it must
// execute the command, and any edits the server sends back via a
// `workspace/applyEdit` request while the command runs must be captured into
// the returned project transaction (and be undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server advertises code-action resolve support, so the client will
    // send a resolve request before applying the action.
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Ask the editor to insert "X" at the start of `a.ts`.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2739
2740#[gpui::test(iterations = 10)]
2741async fn test_save_file(cx: &mut gpui::TestAppContext) {
2742 init_test(cx);
2743
2744 let fs = FakeFs::new(cx.executor());
2745 fs.insert_tree(
2746 "/dir",
2747 json!({
2748 "file1": "the old contents",
2749 }),
2750 )
2751 .await;
2752
2753 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2754 let buffer = project
2755 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2756 .await
2757 .unwrap();
2758 buffer.update(cx, |buffer, cx| {
2759 assert_eq!(buffer.text(), "the old contents");
2760 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2761 });
2762
2763 project
2764 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2765 .await
2766 .unwrap();
2767
2768 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2769 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2770}
2771
2772#[gpui::test(iterations = 30)]
2773async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2774 init_test(cx);
2775
2776 let fs = FakeFs::new(cx.executor().clone());
2777 fs.insert_tree(
2778 "/dir",
2779 json!({
2780 "file1": "the original contents",
2781 }),
2782 )
2783 .await;
2784
2785 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2786 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2787 let buffer = project
2788 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2789 .await
2790 .unwrap();
2791
2792 // Simulate buffer diffs being slow, so that they don't complete before
2793 // the next file change occurs.
2794 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2795
2796 // Change the buffer's file on disk, and then wait for the file change
2797 // to be detected by the worktree, so that the buffer starts reloading.
2798 fs.save(
2799 "/dir/file1".as_ref(),
2800 &"the first contents".into(),
2801 Default::default(),
2802 )
2803 .await
2804 .unwrap();
2805 worktree.next_event(cx).await;
2806
2807 // Change the buffer's file again. Depending on the random seed, the
2808 // previous file change may still be in progress.
2809 fs.save(
2810 "/dir/file1".as_ref(),
2811 &"the second contents".into(),
2812 Default::default(),
2813 )
2814 .await
2815 .unwrap();
2816 worktree.next_event(cx).await;
2817
2818 cx.executor().run_until_parked();
2819 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2820 buffer.read_with(cx, |buffer, _| {
2821 assert_eq!(buffer.text(), on_disk_text);
2822 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2823 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2824 });
2825}
2826
2827#[gpui::test(iterations = 30)]
2828async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2829 init_test(cx);
2830
2831 let fs = FakeFs::new(cx.executor().clone());
2832 fs.insert_tree(
2833 "/dir",
2834 json!({
2835 "file1": "the original contents",
2836 }),
2837 )
2838 .await;
2839
2840 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2841 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2842 let buffer = project
2843 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2844 .await
2845 .unwrap();
2846
2847 // Simulate buffer diffs being slow, so that they don't complete before
2848 // the next file change occurs.
2849 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2850
2851 // Change the buffer's file on disk, and then wait for the file change
2852 // to be detected by the worktree, so that the buffer starts reloading.
2853 fs.save(
2854 "/dir/file1".as_ref(),
2855 &"the first contents".into(),
2856 Default::default(),
2857 )
2858 .await
2859 .unwrap();
2860 worktree.next_event(cx).await;
2861
2862 cx.executor()
2863 .spawn(cx.executor().simulate_random_delay())
2864 .await;
2865
2866 // Perform a noop edit, causing the buffer's version to increase.
2867 buffer.update(cx, |buffer, cx| {
2868 buffer.edit([(0..0, " ")], None, cx);
2869 buffer.undo(cx);
2870 });
2871
2872 cx.executor().run_until_parked();
2873 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2874 buffer.read_with(cx, |buffer, _| {
2875 let buffer_text = buffer.text();
2876 if buffer_text == on_disk_text {
2877 assert!(
2878 !buffer.is_dirty() && !buffer.has_conflict(),
2879 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2880 );
2881 }
2882 // If the file change occurred while the buffer was processing the first
2883 // change, the buffer will be in a conflicting state.
2884 else {
2885 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2886 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2887 }
2888 });
2889}
2890
// Saving works when the worktree's root is the file itself (a single-file
// worktree) rather than a directory containing it.
#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    // The project is opened directly on the file, not on "/dir".
    let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Make the buffer contents large before saving.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // The saved file on disk must match the buffer's contents.
    let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
2921
// Saving an untitled buffer to a concrete path: the buffer acquires a file,
// stops being dirty, picks up the language matching its new extension, and
// subsequent opens of that path resolve to the same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // Create an in-memory buffer with no backing file.
    let buffer = project.update(cx, |project, cx| {
        project.create_buffer("", None, cx).unwrap()
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // With no file, the buffer defaults to plain text.
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees().next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension now selects the Rust language.
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Re-opening the saved path yields the same buffer, not a new one.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
2975
// After files and directories are renamed or deleted on the real filesystem,
// project entry ids and open buffers must follow their entries, and a remote
// replica of the worktree, fed the observed update stream, must converge to
// the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
    let rpc = project.update(cx, |p, _| p.client.clone());

    // Helper: open a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());

    let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());

    // Record every update the local worktree emits, so they can be replayed
    // into the remote worktree below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames, even renames across directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files' new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        // ...and only the deleted file's buffer is marked as deleted.
        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3124
// Renaming a directory must preserve the entry ids of the directory and the
// files within it, and must not dirty buffers that are open on those files.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" through the project.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // The entry ids are stable across the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3175
3176#[gpui::test]
3177async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3178 init_test(cx);
3179
3180 let fs = FakeFs::new(cx.executor());
3181 fs.insert_tree(
3182 "/dir",
3183 json!({
3184 "a.txt": "a-contents",
3185 "b.txt": "b-contents",
3186 }),
3187 )
3188 .await;
3189
3190 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3191
3192 // Spawn multiple tasks to open paths, repeating some paths.
3193 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3194 (
3195 p.open_local_buffer("/dir/a.txt", cx),
3196 p.open_local_buffer("/dir/b.txt", cx),
3197 p.open_local_buffer("/dir/a.txt", cx),
3198 )
3199 });
3200
3201 let buffer_a_1 = buffer_a_1.await.unwrap();
3202 let buffer_a_2 = buffer_a_2.await.unwrap();
3203 let buffer_b = buffer_b.await.unwrap();
3204 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3205 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3206
3207 // There is only one buffer per path.
3208 let buffer_a_id = buffer_a_1.entity_id();
3209 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3210
3211 // Open the same path again while it is still open.
3212 drop(buffer_a_1);
3213 let buffer_a_3 = project
3214 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3215 .await
3216 .unwrap();
3217
3218 // There's still only one buffer per path.
3219 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3220}
3221
// Tracks `is_dirty` and the events a buffer emits as it is edited, saved,
// restored to its last-saved text, and as its backing file is deleted on disk.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all events except operations, which are not of interest here.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        // Mark the current version as saved without going through the project.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after the save produced a `DirtyChanged` event.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3362
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // a clean buffer silently reloads via a diff (preserving anchors), while a
    // dirty buffer keeps its edits and is flagged as conflicted instead.
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the three initial lines so we
    // can verify that anchors survive the diff-based reload below.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the diff edits instead of being reset:
        // row 0 shifted down one line, rows 1 and 2 merged/shifted to row 3.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3443
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    // Verifies line-ending handling: buffer text is always LF-normalized in
    // memory while the file's original line ending is remembered, line-ending
    // changes on disk are picked up, and the remembered ending is written
    // back out on save.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    // The CRLF file is normalized to LF in memory, but its original line
    // ending is recorded on the buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3505
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics published with `related_information` are
    // grouped: a primary diagnostic and the hint diagnostics that reference
    // it share a `group_id`, hints are marked `is_primary: false`, and
    // `diagnostic_group` returns a group's entries in buffer order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate a language server publishing two diagnostic "families":
    // "error 1" (a warning with one related hint) and "error 2" (an error
    // with two related hints). The hint diagnostics point back at their
    // primary via `related_information`, which is what drives grouping.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group "error 1".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1" (points back at the original).
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group "error 2", with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint belonging to "error 2".
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics over the whole buffer, ordered by position. "error 2"
    // and its hints get group 0; "error 1" and its hint get group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" family, hints first because they appear earlier
    // in the buffer than the primary.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" family (primary plus its single hint).
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3747
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end test of LSP rename through `Project`: `prepare_rename`
    // resolves the symbol range under the cursor, and `perform_rename`
    // applies a multi-file `WorkspaceEdit` returned by a fake server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support
    // (including prepare-rename).
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare-rename at offset 7 (inside "ONE"); the fake server answers
    // with the symbol's range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename ONE -> THREE; the fake server returns edits that
    // span both files (the definition and both usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();

    // The resulting transaction must cover both buffers with the edits
    // applied: the definition file and the file containing the two usages.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3880
3881#[gpui::test]
3882async fn test_search(cx: &mut gpui::TestAppContext) {
3883 init_test(cx);
3884
3885 let fs = FakeFs::new(cx.executor());
3886 fs.insert_tree(
3887 "/dir",
3888 json!({
3889 "one.rs": "const ONE: usize = 1;",
3890 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3891 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3892 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3893 }),
3894 )
3895 .await;
3896 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3897 assert_eq!(
3898 search(
3899 &project,
3900 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3901 cx
3902 )
3903 .await
3904 .unwrap(),
3905 HashMap::from_iter([
3906 ("dir/two.rs".to_string(), vec![6..9]),
3907 ("dir/three.rs".to_string(), vec![37..40])
3908 ])
3909 );
3910
3911 let buffer_4 = project
3912 .update(cx, |project, cx| {
3913 project.open_local_buffer("/dir/four.rs", cx)
3914 })
3915 .await
3916 .unwrap();
3917 buffer_4.update(cx, |buffer, cx| {
3918 let text = "two::TWO";
3919 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3920 });
3921
3922 assert_eq!(
3923 search(
3924 &project,
3925 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3926 cx
3927 )
3928 .await
3929 .unwrap(),
3930 HashMap::from_iter([
3931 ("dir/two.rs".to_string(), vec![6..9]),
3932 ("dir/three.rs".to_string(), vec![37..40]),
3933 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3934 ])
3935 );
3936}
3937
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the `files_to_include` filter of project search: results are
    // restricted to files matching at least one inclusion glob, and globs
    // that match nothing simply contribute no results.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // An inclusion glob that matches no files yields no results at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // A single matching inclusion restricts results to that file type.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A non-matching glob alongside a matching one is harmless.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
                Vec::new()
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching inclusions union their results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
                Vec::new()
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.ts".to_string(), vec![14..18]),
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
4053
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the `files_to_exclude` filter of project search: files
    // matching any exclusion glob are dropped from the results, and globs
    // that match nothing have no effect.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // A non-matching exclusion leaves all results intact.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.rs".to_string(), vec![8..12]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // A matching exclusion removes exactly the matching files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.rs").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A non-matching exclusion alongside a matching one is harmless.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every file type present yields an empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap().is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
4168
4169#[gpui::test]
4170async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4171 init_test(cx);
4172
4173 let search_query = "file";
4174
4175 let fs = FakeFs::new(cx.executor());
4176 fs.insert_tree(
4177 "/dir",
4178 json!({
4179 "one.rs": r#"// Rust file one"#,
4180 "one.ts": r#"// TypeScript file one"#,
4181 "two.rs": r#"// Rust file two"#,
4182 "two.ts": r#"// TypeScript file two"#,
4183 }),
4184 )
4185 .await;
4186 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4187
4188 assert!(
4189 search(
4190 &project,
4191 SearchQuery::text(
4192 search_query,
4193 false,
4194 true,
4195 false,
4196 vec![PathMatcher::new("*.odd").unwrap()],
4197 vec![PathMatcher::new("*.odd").unwrap()],
4198 )
4199 .unwrap(),
4200 cx
4201 )
4202 .await
4203 .unwrap()
4204 .is_empty(),
4205 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4206 );
4207
4208 assert!(
4209 search(
4210 &project,
4211 SearchQuery::text(
4212 search_query,
4213 false,
4214 true,
4215 false,
4216 vec![PathMatcher::new("*.ts").unwrap()],
4217 vec![PathMatcher::new("*.ts").unwrap()],
4218 ).unwrap(),
4219 cx
4220 )
4221 .await
4222 .unwrap()
4223 .is_empty(),
4224 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4225 );
4226
4227 assert!(
4228 search(
4229 &project,
4230 SearchQuery::text(
4231 search_query,
4232 false,
4233 true,
4234 false,
4235 vec![
4236 PathMatcher::new("*.ts").unwrap(),
4237 PathMatcher::new("*.odd").unwrap()
4238 ],
4239 vec![
4240 PathMatcher::new("*.ts").unwrap(),
4241 PathMatcher::new("*.odd").unwrap()
4242 ],
4243 )
4244 .unwrap(),
4245 cx
4246 )
4247 .await
4248 .unwrap()
4249 .is_empty(),
4250 "Non-matching inclusions and exclusions should not change that."
4251 );
4252
4253 assert_eq!(
4254 search(
4255 &project,
4256 SearchQuery::text(
4257 search_query,
4258 false,
4259 true,
4260 false,
4261 vec![
4262 PathMatcher::new("*.ts").unwrap(),
4263 PathMatcher::new("*.odd").unwrap()
4264 ],
4265 vec![
4266 PathMatcher::new("*.rs").unwrap(),
4267 PathMatcher::new("*.odd").unwrap()
4268 ],
4269 )
4270 .unwrap(),
4271 cx
4272 )
4273 .await
4274 .unwrap(),
4275 HashMap::from_iter([
4276 ("dir/one.ts".to_string(), vec![14..18]),
4277 ("dir/two.ts".to_string(), vec![14..18]),
4278 ]),
4279 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4280 );
4281}
4282
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies inclusion globs in a project with two worktrees: a glob
    // prefixed with a worktree name restricts results to that worktree,
    // while an unprefixed glob applies across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // Worktree-qualified glob: only worktree-a's Rust file matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same for the other worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An unqualified glob matches files in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4374
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the `include_ignored` flag of project search: by default,
    // gitignored directories (target/, node_modules/) are skipped; with the
    // flag set they are searched, and inclusion/exclusion filters still
    // apply on top of that.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Default search (include_ignored = false): ignored dirs are skipped.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project with include_ignored = true: ignored files are searched.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored search combined with inclusion/exclusion filters.
    let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
    let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4472
#[test]
fn test_glob_literal_prefix() {
    // Each glob pattern should map to the literal path prefix that precedes
    // its first wildcard/brace component; a fully literal path is returned
    // unchanged.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (pattern, expected_prefix) in cases {
        assert_eq!(glob_literal_prefix(pattern), expected_prefix);
    }
}
4480
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Verifies path validation in `Project::create_entry` and `open_buffer`:
    // odd-but-legal names like "b.." are allowed, while any path containing
    // a ".." component (which could escape the worktree) is rejected.
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three; /one/two/c.rs sits outside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." merely ends with dots — it's a valid file name, not a traversal.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the legal "b.." entry was created on disk; the rejected paths
    // left no trace.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4549
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // When several language servers are registered for one language, a hover
    // request should fan out to every server that advertises hover
    // capabilities: non-empty responses are aggregated, `None` responses are
    // dropped, and servers without the capability must never be queried.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // NOTE(review): the boolean argument appears to mark whether the adapter is
    // the language's primary server — only this first registration passes
    // `true`; confirm against `register_specific_fake_lsp_adapter`'s contract.
    // All four registrations feed the same stream of started fake servers.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Two more hover-capable secondary servers (Tailwind, ESLint)…
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // …and one server that declares no hover capability at all.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    // Let all fake servers finish starting before handlers are installed.
    cx.executor().run_until_parked();

    // Install a hover handler on each started server, keyed by server name:
    // two answer with real contents, ESLint answers `None`, and the
    // no-capability server panics if it is (incorrectly) asked for hovers.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        // Echo the server's name so the final assertion can
                        // tell which servers actually responded.
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Deliberately NOT tracked in the map: this handler must never
                // run, so nothing awaits it below.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the hover request first, then drive every registered handler to
    // completion so all capable servers actually receive the request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned contents contribute results; the
    // `None` from ESLintServer is filtered out. Results are sorted because
    // server response order is not deterministic.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4703
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover response whose parts are all empty or whitespace-only should be
    // discarded entirely rather than surfaced as blank hover popups.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Single fake TypeScript server that advertises hover support.
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    // Let the fake server finish starting before the handler is installed.
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three "content" parts that are all effectively empty:
    // an empty string, spaces only, and newlines only.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("   ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    // Start the hover request first, then drive the handler so the server
    // actually receives it before the result is asserted on.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    // All parts were blank, so no hover entries should survive filtering.
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
4773
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    // Mirror of `test_multiple_language_server_hovers`, but for code actions:
    // a code-action request should fan out to every server advertising the
    // capability, aggregate non-empty responses, drop `None` responses, and
    // never query servers without the capability.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];
    // NOTE(review): the boolean argument appears to mark the adapter as the
    // language's primary server — only this first registration passes `true`;
    // confirm against `register_specific_fake_lsp_adapter`'s contract.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // Two more action-capable secondary servers (Tailwind, ESLint)…
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // …and one server that declares no code-action capability at all.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    // Let all fake servers finish starting before handlers are installed.
    cx.executor().run_until_parked();

    // Install a code-action handler on each started server, keyed by name:
    // two answer with a named action, ESLint answers `None`, and the
    // no-capability server panics if it is (incorrectly) queried.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_actions_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            // Echo the server's name in the action title so
                            // the final assertion can attribute responses.
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                // Deliberately NOT tracked in the map: this handler must never
                // run, so nothing awaits it below.
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the code-action request over the whole buffer, then drive every
    // registered handler to completion so all capable servers receive it.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned actions contribute results; sorted
    // because server response order is not deterministic.
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .into_iter()
            .map(|code_action| code_action.lsp_action.title)
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
4928
4929async fn search(
4930 project: &Model<Project>,
4931 query: SearchQuery,
4932 cx: &mut gpui::TestAppContext,
4933) -> Result<HashMap<String, Vec<Range<usize>>>> {
4934 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4935 let mut results = HashMap::default();
4936 while let Some(search_result) = search_rx.next().await {
4937 match search_result {
4938 SearchResult::Buffer { buffer, ranges } => {
4939 results.entry(buffer).or_insert(ranges);
4940 }
4941 SearchResult::LimitReached => {}
4942 }
4943 }
4944 Ok(results
4945 .into_iter()
4946 .map(|(buffer, ranges)| {
4947 buffer.update(cx, |buffer, cx| {
4948 let path = buffer
4949 .file()
4950 .unwrap()
4951 .full_path(cx)
4952 .to_string_lossy()
4953 .to_string();
4954 let ranges = ranges
4955 .into_iter()
4956 .map(|range| range.to_offset(buffer))
4957 .collect::<Vec<_>>();
4958 (path, ranges)
4959 })
4960 })
4961 .collect())
4962}
4963
4964fn init_test(cx: &mut gpui::TestAppContext) {
4965 if std::env::var("RUST_LOG").is_ok() {
4966 env_logger::try_init().ok();
4967 }
4968
4969 cx.update(|cx| {
4970 let settings_store = SettingsStore::test(cx);
4971 cx.set_global(settings_store);
4972 release_channel::init("0.0.0", cx);
4973 language::init(cx);
4974 Project::init_settings(cx);
4975 });
4976}
4977
4978fn json_lang() -> Arc<Language> {
4979 Arc::new(Language::new(
4980 LanguageConfig {
4981 name: "JSON".into(),
4982 matcher: LanguageMatcher {
4983 path_suffixes: vec!["json".to_string()],
4984 ..Default::default()
4985 },
4986 ..Default::default()
4987 },
4988 None,
4989 ))
4990}
4991
4992fn js_lang() -> Arc<Language> {
4993 Arc::new(Language::new(
4994 LanguageConfig {
4995 name: Arc::from("JavaScript"),
4996 matcher: LanguageMatcher {
4997 path_suffixes: vec!["js".to_string()],
4998 ..Default::default()
4999 },
5000 ..Default::default()
5001 },
5002 None,
5003 ))
5004}
5005
5006fn rust_lang() -> Arc<Language> {
5007 Arc::new(Language::new(
5008 LanguageConfig {
5009 name: "Rust".into(),
5010 matcher: LanguageMatcher {
5011 path_suffixes: vec!["rs".to_string()],
5012 ..Default::default()
5013 },
5014 ..Default::default()
5015 },
5016 Some(tree_sitter_rust::language()),
5017 ))
5018}
5019
5020fn typescript_lang() -> Arc<Language> {
5021 Arc::new(Language::new(
5022 LanguageConfig {
5023 name: "TypeScript".into(),
5024 matcher: LanguageMatcher {
5025 path_suffixes: vec!["ts".to_string()],
5026 ..Default::default()
5027 },
5028 ..Default::default()
5029 },
5030 Some(tree_sitter_typescript::language_typescript()),
5031 ))
5032}
5033
5034fn tsx_lang() -> Arc<Language> {
5035 Arc::new(Language::new(
5036 LanguageConfig {
5037 name: "tsx".into(),
5038 matcher: LanguageMatcher {
5039 path_suffixes: vec!["tsx".to_string()],
5040 ..Default::default()
5041 },
5042 ..Default::default()
5043 },
5044 Some(tree_sitter_typescript::language_tsx()),
5045 ))
5046}