1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{TaskContext, TaskSource, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20use worktree::WorktreeModelHandle as _;
21
22#[gpui::test]
23async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
24 cx.executor().allow_parking();
25
26 let (tx, mut rx) = futures::channel::mpsc::unbounded();
27 let _thread = std::thread::spawn(move || {
28 std::fs::metadata("/Users").unwrap();
29 std::thread::sleep(Duration::from_millis(1000));
30 tx.unbounded_send(1).unwrap();
31 });
32 rx.next().await.unwrap();
33}
34
35#[gpui::test]
36async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let io_task = smol::unblock(move || {
40 println!("sleeping on thread {:?}", std::thread::current().id());
41 std::thread::sleep(Duration::from_millis(10));
42 1
43 });
44
45 let task = cx.foreground_executor().spawn(async move {
46 io_task.await;
47 });
48
49 task.await;
50}
51
52#[cfg(not(windows))]
53#[gpui::test]
54async fn test_symlinks(cx: &mut gpui::TestAppContext) {
55 init_test(cx);
56 cx.executor().allow_parking();
57
58 let dir = temp_tree(json!({
59 "root": {
60 "apple": "",
61 "banana": {
62 "carrot": {
63 "date": "",
64 "endive": "",
65 }
66 },
67 "fennel": {
68 "grape": "",
69 }
70 }
71 }));
72
73 let root_link_path = dir.path().join("root_link");
74 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
75 os::unix::fs::symlink(
76 &dir.path().join("root/fennel"),
77 &dir.path().join("root/finnochio"),
78 )
79 .unwrap();
80
81 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
82
83 project.update(cx, |project, cx| {
84 let tree = project.worktrees().next().unwrap().read(cx);
85 assert_eq!(tree.file_count(), 5);
86 assert_eq!(
87 tree.inode_for_path("fennel/grape"),
88 tree.inode_for_path("finnochio/grape")
89 );
90 });
91}
92
93#[gpui::test]
94async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
95 init_test(cx);
96
97 let fs = FakeFs::new(cx.executor());
98 fs.insert_tree(
99 "/the-root",
100 json!({
101 ".zed": {
102 "settings.json": r#"{ "tab_size": 8 }"#,
103 "tasks.json": r#"[{
104 "label": "cargo check",
105 "command": "cargo",
106 "args": ["check", "--all"]
107 },]"#,
108 },
109 "a": {
110 "a.rs": "fn a() {\n A\n}"
111 },
112 "b": {
113 ".zed": {
114 "settings.json": r#"{ "tab_size": 2 }"#,
115 "tasks.json": r#"[{
116 "label": "cargo check",
117 "command": "cargo",
118 "args": ["check"]
119 },]"#,
120 },
121 "b.rs": "fn b() {\n B\n}"
122 }
123 }),
124 )
125 .await;
126
127 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
128 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
129 let task_context = TaskContext::default();
130
131 cx.executor().run_until_parked();
132 let workree_id = cx.update(|cx| {
133 project.update(cx, |project, cx| {
134 project.worktrees().next().unwrap().read(cx).id()
135 })
136 });
137 let global_task_source_kind = TaskSourceKind::Worktree {
138 id: workree_id,
139 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
140 id_base: "local_tasks_for_worktree",
141 };
142 cx.update(|cx| {
143 let tree = worktree.read(cx);
144
145 let settings_a = language_settings(
146 None,
147 Some(
148 &(File::for_entry(
149 tree.entry_for_path("a/a.rs").unwrap().clone(),
150 worktree.clone(),
151 ) as _),
152 ),
153 cx,
154 );
155 let settings_b = language_settings(
156 None,
157 Some(
158 &(File::for_entry(
159 tree.entry_for_path("b/b.rs").unwrap().clone(),
160 worktree.clone(),
161 ) as _),
162 ),
163 cx,
164 );
165
166 assert_eq!(settings_a.tab_size.get(), 8);
167 assert_eq!(settings_b.tab_size.get(), 2);
168
169 let all_tasks = project
170 .update(cx, |project, cx| {
171 project.task_inventory().update(cx, |inventory, cx| {
172 let (mut old, new) = inventory.used_and_current_resolved_tasks(
173 None,
174 Some(workree_id),
175 &task_context,
176 cx,
177 );
178 old.extend(new);
179 old
180 })
181 })
182 .into_iter()
183 .map(|(source_kind, task)| {
184 let resolved = task.resolved.unwrap();
185 (
186 source_kind,
187 task.resolved_label,
188 resolved.args,
189 resolved.env,
190 )
191 })
192 .collect::<Vec<_>>();
193 assert_eq!(
194 all_tasks,
195 vec![
196 (
197 TaskSourceKind::Worktree {
198 id: workree_id,
199 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
200 id_base: "local_tasks_for_worktree",
201 },
202 "cargo check".to_string(),
203 vec!["check".to_string()],
204 HashMap::default(),
205 ),
206 (
207 global_task_source_kind.clone(),
208 "cargo check".to_string(),
209 vec!["check".to_string(), "--all".to_string()],
210 HashMap::default(),
211 ),
212 ]
213 );
214 });
215
216 project.update(cx, |project, cx| {
217 let inventory = project.task_inventory();
218 inventory.update(cx, |inventory, cx| {
219 let (mut old, new) = inventory.used_and_current_resolved_tasks(
220 None,
221 Some(workree_id),
222 &task_context,
223 cx,
224 );
225 old.extend(new);
226 let (_, resolved_task) = old
227 .into_iter()
228 .find(|(source_kind, _)| source_kind == &global_task_source_kind)
229 .expect("should have one global task");
230 inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
231 })
232 });
233
234 cx.update(|cx| {
235 let all_tasks = project
236 .update(cx, |project, cx| {
237 project.task_inventory().update(cx, |inventory, cx| {
238 inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
239 inventory.add_source(
240 global_task_source_kind.clone(),
241 |cx| {
242 cx.new_model(|_| {
243 let source = TestTaskSource {
244 tasks: TaskTemplates(vec![TaskTemplate {
245 label: "cargo check".to_string(),
246 command: "cargo".to_string(),
247 args: vec![
248 "check".to_string(),
249 "--all".to_string(),
250 "--all-targets".to_string(),
251 ],
252 env: HashMap::from_iter(Some((
253 "RUSTFLAGS".to_string(),
254 "-Zunstable-options".to_string(),
255 ))),
256 ..TaskTemplate::default()
257 }]),
258 };
259 Box::new(source) as Box<_>
260 })
261 },
262 cx,
263 );
264 let (mut old, new) = inventory.used_and_current_resolved_tasks(
265 None,
266 Some(workree_id),
267 &task_context,
268 cx,
269 );
270 old.extend(new);
271 old
272 })
273 })
274 .into_iter()
275 .map(|(source_kind, task)| {
276 let resolved = task.resolved.unwrap();
277 (
278 source_kind,
279 task.resolved_label,
280 resolved.args,
281 resolved.env,
282 )
283 })
284 .collect::<Vec<_>>();
285 assert_eq!(
286 all_tasks,
287 vec![
288 (
289 TaskSourceKind::Worktree {
290 id: workree_id,
291 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
292 id_base: "local_tasks_for_worktree",
293 },
294 "cargo check".to_string(),
295 vec!["check".to_string()],
296 HashMap::default(),
297 ),
298 (
299 TaskSourceKind::Worktree {
300 id: workree_id,
301 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
302 id_base: "local_tasks_for_worktree",
303 },
304 "cargo check".to_string(),
305 vec![
306 "check".to_string(),
307 "--all".to_string(),
308 "--all-targets".to_string()
309 ],
310 HashMap::from_iter(Some((
311 "RUSTFLAGS".to_string(),
312 "-Zunstable-options".to_string()
313 ))),
314 ),
315 ]
316 );
317 });
318}
319
/// A static, in-memory task source used in tests to inject predefined task
/// templates into a project's task inventory.
struct TestTaskSource {
    // Templates returned verbatim by `tasks_to_schedule`.
    tasks: TaskTemplates,
}
323
impl TaskSource for TestTaskSource {
    // Expose the concrete type for downcasting by inventory code.
    fn as_any(&mut self) -> &mut dyn std::any::Any {
        self
    }

    // Return a copy of the fixed templates; the model context is unused
    // because this source never reacts to application state.
    fn tasks_to_schedule(&mut self, _: &mut ModelContext<Box<dyn TaskSource>>) -> TaskTemplates {
        self.tasks.clone()
    }
}
333
// End-to-end test of language-server lifecycle management for open buffers:
// server startup on first relevant buffer open, capability-based buffer
// configuration, routing of change/save/close notifications to the matching
// server, renames that change a buffer's language, and server restarts.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake servers for Rust and JSON, each advertising distinct
    // completion trigger characters so the test can tell which server
    // configured which buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: Default::default()
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: Default::default()
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // Attach a diagnostic so we can verify below that it is cleared when the
    // buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers should receive a shutdown request before being replaced.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: Default::default()
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: Default::default()
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
704
// Verifies that filesystem events are forwarded to a language server
// according to the glob patterns it registers via
// `workspace/didChangeWatchedFiles`, and that watching a gitignored path
// forces that subtree to be loaded into the worktree.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect the change notifications in sorted order so assertions below
    // are deterministic.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
898
899#[gpui::test]
900async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
901 init_test(cx);
902
903 let fs = FakeFs::new(cx.executor());
904 fs.insert_tree(
905 "/dir",
906 json!({
907 "a.rs": "let a = 1;",
908 "b.rs": "let b = 2;"
909 }),
910 )
911 .await;
912
913 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
914
915 let buffer_a = project
916 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
917 .await
918 .unwrap();
919 let buffer_b = project
920 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
921 .await
922 .unwrap();
923
924 project.update(cx, |project, cx| {
925 project
926 .update_diagnostics(
927 LanguageServerId(0),
928 lsp::PublishDiagnosticsParams {
929 uri: Url::from_file_path("/dir/a.rs").unwrap(),
930 version: None,
931 diagnostics: vec![lsp::Diagnostic {
932 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
933 severity: Some(lsp::DiagnosticSeverity::ERROR),
934 message: "error 1".to_string(),
935 ..Default::default()
936 }],
937 },
938 &[],
939 cx,
940 )
941 .unwrap();
942 project
943 .update_diagnostics(
944 LanguageServerId(0),
945 lsp::PublishDiagnosticsParams {
946 uri: Url::from_file_path("/dir/b.rs").unwrap(),
947 version: None,
948 diagnostics: vec![lsp::Diagnostic {
949 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
950 severity: Some(lsp::DiagnosticSeverity::WARNING),
951 message: "error 2".to_string(),
952 ..Default::default()
953 }],
954 },
955 &[],
956 cx,
957 )
958 .unwrap();
959 });
960
961 buffer_a.update(cx, |buffer, _| {
962 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
963 assert_eq!(
964 chunks
965 .iter()
966 .map(|(s, d)| (s.as_str(), *d))
967 .collect::<Vec<_>>(),
968 &[
969 ("let ", None),
970 ("a", Some(DiagnosticSeverity::ERROR)),
971 (" = 1;", None),
972 ]
973 );
974 });
975 buffer_b.update(cx, |buffer, _| {
976 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
977 assert_eq!(
978 chunks
979 .iter()
980 .map(|(s, d)| (s.as_str(), *d))
981 .collect::<Vec<_>>(),
982 &[
983 ("let ", None),
984 ("b", Some(DiagnosticSeverity::WARNING)),
985 (" = 2;", None),
986 ]
987 );
988 });
989}
990
991#[gpui::test]
992async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
993 init_test(cx);
994
995 let fs = FakeFs::new(cx.executor());
996 fs.insert_tree(
997 "/root",
998 json!({
999 "dir": {
1000 ".git": {
1001 "HEAD": "ref: refs/heads/main",
1002 },
1003 ".gitignore": "b.rs",
1004 "a.rs": "let a = 1;",
1005 "b.rs": "let b = 2;",
1006 },
1007 "other.rs": "let b = c;"
1008 }),
1009 )
1010 .await;
1011
1012 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1013 let (worktree, _) = project
1014 .update(cx, |project, cx| {
1015 project.find_or_create_local_worktree("/root/dir", true, cx)
1016 })
1017 .await
1018 .unwrap();
1019 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1020
1021 let (worktree, _) = project
1022 .update(cx, |project, cx| {
1023 project.find_or_create_local_worktree("/root/other.rs", false, cx)
1024 })
1025 .await
1026 .unwrap();
1027 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1028
1029 let server_id = LanguageServerId(0);
1030 project.update(cx, |project, cx| {
1031 project
1032 .update_diagnostics(
1033 server_id,
1034 lsp::PublishDiagnosticsParams {
1035 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1036 version: None,
1037 diagnostics: vec![lsp::Diagnostic {
1038 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1039 severity: Some(lsp::DiagnosticSeverity::ERROR),
1040 message: "unused variable 'b'".to_string(),
1041 ..Default::default()
1042 }],
1043 },
1044 &[],
1045 cx,
1046 )
1047 .unwrap();
1048 project
1049 .update_diagnostics(
1050 server_id,
1051 lsp::PublishDiagnosticsParams {
1052 uri: Url::from_file_path("/root/other.rs").unwrap(),
1053 version: None,
1054 diagnostics: vec![lsp::Diagnostic {
1055 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1056 severity: Some(lsp::DiagnosticSeverity::ERROR),
1057 message: "unknown variable 'c'".to_string(),
1058 ..Default::default()
1059 }],
1060 },
1061 &[],
1062 cx,
1063 )
1064 .unwrap();
1065 });
1066
1067 let main_ignored_buffer = project
1068 .update(cx, |project, cx| {
1069 project.open_buffer((main_worktree_id, "b.rs"), cx)
1070 })
1071 .await
1072 .unwrap();
1073 main_ignored_buffer.update(cx, |buffer, _| {
1074 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1075 assert_eq!(
1076 chunks
1077 .iter()
1078 .map(|(s, d)| (s.as_str(), *d))
1079 .collect::<Vec<_>>(),
1080 &[
1081 ("let ", None),
1082 ("b", Some(DiagnosticSeverity::ERROR)),
1083 (" = 2;", None),
1084 ],
1085 "Gigitnored buffers should still get in-buffer diagnostics",
1086 );
1087 });
1088 let other_buffer = project
1089 .update(cx, |project, cx| {
1090 project.open_buffer((other_worktree_id, ""), cx)
1091 })
1092 .await
1093 .unwrap();
1094 other_buffer.update(cx, |buffer, _| {
1095 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1096 assert_eq!(
1097 chunks
1098 .iter()
1099 .map(|(s, d)| (s.as_str(), *d))
1100 .collect::<Vec<_>>(),
1101 &[
1102 ("let b = ", None),
1103 ("c", Some(DiagnosticSeverity::ERROR)),
1104 (";", None),
1105 ],
1106 "Buffers from hidden projects should still get in-buffer diagnostics"
1107 );
1108 });
1109
1110 project.update(cx, |project, cx| {
1111 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1112 assert_eq!(
1113 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1114 vec![(
1115 ProjectPath {
1116 worktree_id: main_worktree_id,
1117 path: Arc::from(Path::new("b.rs")),
1118 },
1119 server_id,
1120 DiagnosticSummary {
1121 error_count: 1,
1122 warning_count: 0,
1123 }
1124 )]
1125 );
1126 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1127 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1128 });
1129}
1130
// Verifies that a language server's disk-based diagnostics progress (signaled
// via the adapter's `disk_based_diagnostics_progress_token`) is surfaced as
// project events in order: LanguageServerAdded -> DiskBasedDiagnosticsStarted
// -> DiagnosticsUpdated -> DiskBasedDiagnosticsFinished, that the published
// diagnostic lands in the buffer, and that re-publishing an identical empty
// diagnostic set does not emit a second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(0)),
    );

    // Beginning a progress work item whose token starts with the adapter's
    // disk-based token should be reported as "disk-based diagnostics started".
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic for a file that is not yet open as a buffer.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress work item completes the disk-based diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the file afterwards should show the previously published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish should be a no-op: after the
    // executor settles, no further event is pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1259
// Verifies that restarting a language server while its disk-based diagnostics
// pass is still in flight does not leave the project stuck in a "diagnostics
// running" state: the replacement server (id 1) drives a fresh started/finished
// cycle, and the old server's never-completed progress is discarded.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    // Opening the buffer starts the first fake server (id 0).
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be counted as running diagnostics.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            // `[T; 0]` is just a typed way of writing an empty expected list.
            [LanguageServerId(0); 0]
        );
    });
}
1338
// Verifies that restarting a language server clears any diagnostics it had
// already published: both the in-buffer diagnostic entries and the project's
// aggregated diagnostic summary drop back to zero after the restart.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Confirm the diagnostic is visible in the buffer and the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1418
// Verifies that after a server restart, buffer versioning is reset for the new
// server: even though the old server was sent diagnostics referencing a bogus
// buffer version (10000), the restarted server's DidOpenTextDocument reports
// version 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });
    // The new server re-opens the buffer; its reported version must start at 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
1457
// Verifies that the per-language `enable_language_server` setting starts and
// stops the correct servers independently: disabling Rust stops only the Rust
// server; re-enabling Rust while disabling JavaScript starts a fresh Rust
// server and shuts down the JavaScript one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an LSP Exit notification on shutdown.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        cx.update_global(|settings: &mut SettingsStore, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1571
// Verifies that LSP diagnostics published against an older buffer version are
// correctly transformed onto the current buffer contents: ranges are shifted
// by intervening edits, overlapping diagnostics are highlighted correctly, and
// out-of-order / post-edit publishes still land in the right places.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every range down by 3 rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        // Full-buffer chunking: diagnostics annotate exactly the identifiers.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // Chunking a sub-range clips diagnostics at the range boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The error takes precedence where the two diagnostics overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    // Entries come back sorted by position, with ranges adjusted for the
    // edits made after the diagnostics' buffer version.
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1851
// Verifies how zero-width diagnostic ranges are expanded for display:
// an empty range is extended forward to include the following character,
// except at end-of-line, where it is extended backward instead.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly, bypassing any language server.
    project.update(cx, |project, cx| {
        project
            .update_buffer_diagnostics(
                &buffer,
                LanguageServerId(0),
                None,
                vec![
                    // Mid-line empty range (before the ';').
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 1".to_string(),
                            ..Default::default()
                        },
                    },
                    // End-of-line empty range (after the trailing space).
                    DiagnosticEntry {
                        range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
                        diagnostic: Diagnostic {
                            severity: DiagnosticSeverity::ERROR,
                            message: "syntax error 2".to_string(),
                            ..Default::default()
                        },
                    },
                ],
                cx,
            )
            .unwrap();
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
1920
// Verifies that diagnostics reported by different language servers for the
// same file and the same range are tracked independently: the project's
// summary counts both errors rather than deduplicating them.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    project.update(cx, |project, cx| {
        // Server 0 reports an error over `one`.
        project
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports a different error over the exact same range.
        project
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors are counted.
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
1976
// Verifies that `Project::edits_from_lsp` correctly rebases LSP text edits
// that were computed against an older document version: the buffer is edited
// after the server "computed" its edits, and the resulting interpreted edits
// must still apply cleanly to the current buffer contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the "server" will claim its edits target.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Interpret the server's edits, which reference the OLD document version;
    // their positions must be transformed across the edits made above.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits yields the expected merged result.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2129
// Verifies that `Project::edits_from_lsp` minimizes a "large diff for a small
// change" style of edit (as rust-analyzer emits for merge-imports): the four
// raw LSP edits that rewrite most of the file collapse to just two precise
// buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff is reduced to the two edits that actually change text.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2237
// Verifies that `Project::edits_from_lsp` tolerates malformed server edits:
// unordered edits, an inverted range (end before start), and a range pointing
// past the end of the document are all normalized into the same two valid
// buffer edits as the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0,4) precedes start (0,8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range end (line 99) is far past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same normalized result as the well-formed variant of this test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2341
2342fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2343 buffer: &Buffer,
2344 range: Range<T>,
2345) -> Vec<(String, Option<DiagnosticSeverity>)> {
2346 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2347 for chunk in buffer.snapshot().chunks(range, true) {
2348 if chunks.last().map_or(false, |prev_chunk| {
2349 prev_chunk.1 == chunk.diagnostic_severity
2350 }) {
2351 chunks.last_mut().unwrap().0.push_str(chunk.text);
2352 } else {
2353 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2354 }
2355 }
2356 chunks
2357}
2358
// End-to-end go-to-definition test. The definition target lives in a file
// that is not part of the project; resolving it should add an invisible
// worktree to host the target buffer, and dropping the definition should
// release that worktree again.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is in the project; `a.rs` exists on disk outside of it.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // The fake server answers the definition request by pointing into `a.rs`.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as an invisible (non-user-facing) worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
        );

        drop(definition);
    });
    // Once the definition (and with it the target buffer handle) is dropped,
    // the invisible worktree goes away.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2453
// When completion items carry no explicit edit range, the replaced range must
// be inferred from the text around the cursor (the word being typed, or the
// token inside a string literal).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: cursor at the end of the word "fqn". The item supplies
    // `insert_text` but no edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers "fqn" — the 3 characters before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: cursor just before the closing quote of a string literal; the
    // item has neither `insert_text` nor a range, so the label is inserted.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", stopping before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2545
// Carriage returns ("\r" and "\r\n") in a completion's `insert_text` must be
// normalized to plain "\n" in the resulting completion's `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), cx)
    });

    // The server's insert text mixes a bare CR and a CRLF line ending.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both CR and CRLF were converted to LF.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2606
// A code action may carry only a command and no edits. Applying it must:
// resolve the action (which fills in the command), execute that command on
// the server, honor the `workspace/applyEdit` request the server sends back
// while the command runs, and surface those edits in the returned
// `ProjectTransaction` so they can be undone as a unit.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The server advertises lazy code-action resolution (`resolve_provider`).
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The server asks the client to insert "X" at the start
                    // of `a.ts` before the command completes.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2739
2740#[gpui::test(iterations = 10)]
2741async fn test_save_file(cx: &mut gpui::TestAppContext) {
2742 init_test(cx);
2743
2744 let fs = FakeFs::new(cx.executor());
2745 fs.insert_tree(
2746 "/dir",
2747 json!({
2748 "file1": "the old contents",
2749 }),
2750 )
2751 .await;
2752
2753 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2754 let buffer = project
2755 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2756 .await
2757 .unwrap();
2758 buffer.update(cx, |buffer, cx| {
2759 assert_eq!(buffer.text(), "the old contents");
2760 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2761 });
2762
2763 project
2764 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2765 .await
2766 .unwrap();
2767
2768 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2769 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2770}
2771
2772#[gpui::test(iterations = 30)]
2773async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2774 init_test(cx);
2775
2776 let fs = FakeFs::new(cx.executor().clone());
2777 fs.insert_tree(
2778 "/dir",
2779 json!({
2780 "file1": "the original contents",
2781 }),
2782 )
2783 .await;
2784
2785 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2786 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2787 let buffer = project
2788 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2789 .await
2790 .unwrap();
2791
2792 // Simulate buffer diffs being slow, so that they don't complete before
2793 // the next file change occurs.
2794 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2795
2796 // Change the buffer's file on disk, and then wait for the file change
2797 // to be detected by the worktree, so that the buffer starts reloading.
2798 fs.save(
2799 "/dir/file1".as_ref(),
2800 &"the first contents".into(),
2801 Default::default(),
2802 )
2803 .await
2804 .unwrap();
2805 worktree.next_event(cx).await;
2806
2807 // Change the buffer's file again. Depending on the random seed, the
2808 // previous file change may still be in progress.
2809 fs.save(
2810 "/dir/file1".as_ref(),
2811 &"the second contents".into(),
2812 Default::default(),
2813 )
2814 .await
2815 .unwrap();
2816 worktree.next_event(cx).await;
2817
2818 cx.executor().run_until_parked();
2819 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2820 buffer.read_with(cx, |buffer, _| {
2821 assert_eq!(buffer.text(), on_disk_text);
2822 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2823 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2824 });
2825}
2826
2827#[gpui::test(iterations = 30)]
2828async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2829 init_test(cx);
2830
2831 let fs = FakeFs::new(cx.executor().clone());
2832 fs.insert_tree(
2833 "/dir",
2834 json!({
2835 "file1": "the original contents",
2836 }),
2837 )
2838 .await;
2839
2840 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2841 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2842 let buffer = project
2843 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2844 .await
2845 .unwrap();
2846
2847 // Simulate buffer diffs being slow, so that they don't complete before
2848 // the next file change occurs.
2849 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2850
2851 // Change the buffer's file on disk, and then wait for the file change
2852 // to be detected by the worktree, so that the buffer starts reloading.
2853 fs.save(
2854 "/dir/file1".as_ref(),
2855 &"the first contents".into(),
2856 Default::default(),
2857 )
2858 .await
2859 .unwrap();
2860 worktree.next_event(cx).await;
2861
2862 cx.executor()
2863 .spawn(cx.executor().simulate_random_delay())
2864 .await;
2865
2866 // Perform a noop edit, causing the buffer's version to increase.
2867 buffer.update(cx, |buffer, cx| {
2868 buffer.edit([(0..0, " ")], None, cx);
2869 buffer.undo(cx);
2870 });
2871
2872 cx.executor().run_until_parked();
2873 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2874 buffer.read_with(cx, |buffer, _| {
2875 let buffer_text = buffer.text();
2876 if buffer_text == on_disk_text {
2877 assert!(
2878 !buffer.is_dirty() && !buffer.has_conflict(),
2879 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2880 );
2881 }
2882 // If the file change occurred while the buffer was processing the first
2883 // change, the buffer will be in a conflicting state.
2884 else {
2885 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2886 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2887 }
2888 });
2889}
2890
2891#[gpui::test]
2892async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2893 init_test(cx);
2894
2895 let fs = FakeFs::new(cx.executor());
2896 fs.insert_tree(
2897 "/dir",
2898 json!({
2899 "file1": "the old contents",
2900 }),
2901 )
2902 .await;
2903
2904 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2905 let buffer = project
2906 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2907 .await
2908 .unwrap();
2909 buffer.update(cx, |buffer, cx| {
2910 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2911 });
2912
2913 project
2914 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2915 .await
2916 .unwrap();
2917
2918 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2919 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2920}
2921
// `save_buffer_as` writes an untitled buffer to a new path. Afterwards the
// buffer must be clean, pick up a language from its new file name, and be
// deduplicated with later opens of that path.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer has no file, so it starts out as plain text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees().next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After saving: the buffer is clean and is re-detected as Rust based on
    // the `.rs` extension of its new path.
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Opening the saved path must yield the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
2973
// Exercises filesystem rescanning against the real filesystem: after renames
// and deletions, worktree entry ids must be preserved, open buffers must
// follow their files, and a remote replica of the worktree must converge to
// the same state when the buffered update messages are applied to it.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
    let rpc = project.update(cx, |p, _| p.client.clone());

    // Helper: open the buffer at `path` (relative to the temp dir).
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: the worktree entry id currently associated with `path`.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());

    let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());

    // Record every update message the local worktree emits so they can be
    // replayed on the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
            let updates = updates.clone();
            move |update| {
                updates.lock().push(update);
                async { true }
            }
        });
    });

    let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames (including renames of a parent directory).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files; the deleted file's buffer keeps its
    // old path but is flagged as deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3122
// Entry ids and an open buffer's identity must be preserved when the file's
// parent directory is renamed through `Project::rename_entry`.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: the worktree entry id currently associated with `path`.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the contained file keep their entry ids, and
    // the open buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3173
3174#[gpui::test]
3175async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3176 init_test(cx);
3177
3178 let fs = FakeFs::new(cx.executor());
3179 fs.insert_tree(
3180 "/dir",
3181 json!({
3182 "a.txt": "a-contents",
3183 "b.txt": "b-contents",
3184 }),
3185 )
3186 .await;
3187
3188 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3189
3190 // Spawn multiple tasks to open paths, repeating some paths.
3191 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3192 (
3193 p.open_local_buffer("/dir/a.txt", cx),
3194 p.open_local_buffer("/dir/b.txt", cx),
3195 p.open_local_buffer("/dir/a.txt", cx),
3196 )
3197 });
3198
3199 let buffer_a_1 = buffer_a_1.await.unwrap();
3200 let buffer_a_2 = buffer_a_2.await.unwrap();
3201 let buffer_b = buffer_b.await.unwrap();
3202 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3203 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3204
3205 // There is only one buffer per path.
3206 let buffer_a_id = buffer_a_1.entity_id();
3207 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3208
3209 // Open the same path again while it is still open.
3210 drop(buffer_a_1);
3211 let buffer_a_3 = project
3212 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3213 .await
3214 .unwrap();
3215
3216 // There's still only one buffer per path.
3217 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3218}
3219
// Tracks the buffer's dirty flag and the event sequence it emits across
// edits, saves, undo-equivalent edits, and on-disk deletion of its file.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every event except Operation, which fires on every edit.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after a save flips the dirty bit, so only one
        // DirtyChanged appears between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3360
// Verifies how an open buffer reacts to edits made to its file on disk:
// a clean buffer is reloaded (with anchors remapped through the diff),
// while a dirty buffer keeps its contents and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows, so we
    // can check how anchors move when the buffer is reloaded from disk.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors should track the lines they were on: "aaa" moved to
        // row 1, "bbbbb" to row 3, and the deleted "c" line's anchor lands
        // at the nearest surviving position.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3441
// Verifies line-ending handling: buffers normalize text to "\n" in memory
// while remembering the on-disk line ending, track changes to it when the
// file is rewritten on disk, and reapply it when saving.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // Note: the CRLF file's in-memory text is normalized to "\n"; only the
    // detected line ending records that it was a Windows file.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        "/dir/file1".as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load("/dir/file2".as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
3503
// Verifies that LSP diagnostics whose `related_information` entries point at
// each other are merged into groups: each group has one primary entry plus
// its supporting hints, sharing a `group_id`, and can be fetched either via
// `diagnostics_in_range` (all groups, ordered by position) or
// `diagnostic_group` (one group at a time).
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Publish two diagnostics ("error 1" and "error 2") plus HINT-severity
    // diagnostics that mirror the primaries' related-information entries —
    // the shape rust-analyzer produces for grouped diagnostics.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, in buffer order. Hints are attached to the group of the
    // primary they reference (group 1 = "error 1", group 0 = "error 2").
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3745
// Verifies the LSP rename flow against a fake language server:
// `prepare_rename` resolves the symbol range under the cursor, and
// `perform_rename` applies the server's multi-file WorkspaceEdit, returning
// a transaction keyed by the affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server that advertises rename support,
    // including prepare-rename.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare the rename at offset 7 (inside "ONE"); the fake server reports
    // the renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename to "THREE". The fake server answers with edits in
    // both files: the definition in one.rs and two usages in two.rs.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo entry; both files
    // should now contain the new name.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3878
// Verifies project-wide text search, including that unsaved edits in open
// buffers are searched instead of the stale on-disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40])
        ])
    );

    // Edit an open buffer (without saving) so that it now contains matches.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/four.rs", cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search should see the dirty buffer's contents, not the file's.
    assert_eq!(
        search(
            &project,
            SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.rs".to_string(), vec![6..9]),
            ("dir/three.rs".to_string(), vec![37..40]),
            ("dir/four.rs".to_string(), vec![25..28, 36..39])
        ])
    );
}
3935
// Verifies the `files_to_include` filter of project search: only files
// matching at least one inclusion glob are searched; non-matching globs
// are harmless when combined with matching ones.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.odd").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![PathMatcher::new("*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
                Vec::new()
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
                Vec::new()
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/two.ts".to_string(), vec![14..18]),
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
4051
// Verifies the `files_to_exclude` filter of project search: files matching
// any exclusion glob are skipped; excluding everything yields no results.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.odd").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.rs".to_string(), vec![8..12]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![PathMatcher::new("*.rs").unwrap()],
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.ts".to_string(), vec![14..18]),
            ("dir/two.ts".to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/one.rs".to_string(), vec![8..12]),
            ("dir/two.rs".to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Vec::new(),
                vec![
                    PathMatcher::new("*.rs").unwrap(),
                    PathMatcher::new("*.ts").unwrap(),
                    PathMatcher::new("*.odd").unwrap(),
                ],
            ).unwrap(),
            cx
        )
        .await
        .unwrap().is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
4166
4167#[gpui::test]
4168async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4169 init_test(cx);
4170
4171 let search_query = "file";
4172
4173 let fs = FakeFs::new(cx.executor());
4174 fs.insert_tree(
4175 "/dir",
4176 json!({
4177 "one.rs": r#"// Rust file one"#,
4178 "one.ts": r#"// TypeScript file one"#,
4179 "two.rs": r#"// Rust file two"#,
4180 "two.ts": r#"// TypeScript file two"#,
4181 }),
4182 )
4183 .await;
4184 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4185
4186 assert!(
4187 search(
4188 &project,
4189 SearchQuery::text(
4190 search_query,
4191 false,
4192 true,
4193 false,
4194 vec![PathMatcher::new("*.odd").unwrap()],
4195 vec![PathMatcher::new("*.odd").unwrap()],
4196 )
4197 .unwrap(),
4198 cx
4199 )
4200 .await
4201 .unwrap()
4202 .is_empty(),
4203 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4204 );
4205
4206 assert!(
4207 search(
4208 &project,
4209 SearchQuery::text(
4210 search_query,
4211 false,
4212 true,
4213 false,
4214 vec![PathMatcher::new("*.ts").unwrap()],
4215 vec![PathMatcher::new("*.ts").unwrap()],
4216 ).unwrap(),
4217 cx
4218 )
4219 .await
4220 .unwrap()
4221 .is_empty(),
4222 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4223 );
4224
4225 assert!(
4226 search(
4227 &project,
4228 SearchQuery::text(
4229 search_query,
4230 false,
4231 true,
4232 false,
4233 vec![
4234 PathMatcher::new("*.ts").unwrap(),
4235 PathMatcher::new("*.odd").unwrap()
4236 ],
4237 vec![
4238 PathMatcher::new("*.ts").unwrap(),
4239 PathMatcher::new("*.odd").unwrap()
4240 ],
4241 )
4242 .unwrap(),
4243 cx
4244 )
4245 .await
4246 .unwrap()
4247 .is_empty(),
4248 "Non-matching inclusions and exclusions should not change that."
4249 );
4250
4251 assert_eq!(
4252 search(
4253 &project,
4254 SearchQuery::text(
4255 search_query,
4256 false,
4257 true,
4258 false,
4259 vec![
4260 PathMatcher::new("*.ts").unwrap(),
4261 PathMatcher::new("*.odd").unwrap()
4262 ],
4263 vec![
4264 PathMatcher::new("*.rs").unwrap(),
4265 PathMatcher::new("*.odd").unwrap()
4266 ],
4267 )
4268 .unwrap(),
4269 cx
4270 )
4271 .await
4272 .unwrap(),
4273 HashMap::from_iter([
4274 ("dir/one.ts".to_string(), vec![14..18]),
4275 ("dir/two.ts".to_string(), vec![14..18]),
4276 ]),
4277 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4278 );
4279}
4280
// Verifies that inclusion globs work across multiple worktrees: a glob
// prefixed with a worktree name restricts results to that worktree, while
// an unprefixed glob applies to every worktree.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                vec![PathMatcher::new("*.ts").unwrap()],
                Vec::new()
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4372
// Verifies the `include_ignored` flag of project search: gitignored files
// (per the .gitignore at the root) are skipped by default, searched when the
// flag is set, and still subject to inclusion/exclusion globs.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each phase so previously-scanned
    // ignored entries don't carry over between searches.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
    let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4470
// Checks that `glob_literal_prefix` returns the longest path prefix of a
// glob pattern that contains no wildcard/alternation syntax (the whole
// pattern when it is fully literal, the empty string when it starts with
// a wildcard).
#[test]
fn test_glob_literal_prefix() {
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (pattern, expected) in cases {
        assert_eq!(glob_literal_prefix(pattern), expected);
    }
}
4478
// Verifies `Project::create_entry` path validation: odd-but-legal names like
// "b.." are allowed, while any path containing a ".." component (which could
// escape the worktree) is rejected, both for entry creation and for opening
// buffers.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a valid file name (it only *ends* with dots), so creation
    // succeeds.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the legitimate "b.." entry was written to the filesystem.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4547
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // A hover request must fan out to every running language server that
    // advertises hover capabilities, and must never be sent to a server
    // whose capabilities omit `hover_provider`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake adapters for the `tsx` language. The bool flag presumably
    // marks the first adapter as primary — TODO confirm against
    // `register_specific_fake_lsp_adapter`'s signature. The stream returned
    // here yields every fake server started for the language: the loop
    // below pulls all four (with distinct names) from this single stream.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This server advertises no hover capability at all; the hover request
    // must never reach it (its handler below panics if it does).
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening a buffer of the registered language kicks off the fake
    // servers; run the executor until the pending startup work settles.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect each started server and install a hover handler matched to its
    // name: two servers answer with distinct hover contents, ESLintServer
    // answers `None`, and the capability-less server panics if ever asked.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue the hover request first so it is in flight, then wait until every
    // hover-capable server has observed its request.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned hover contents contribute results;
    // ESLintServer's `None` response is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4701
4702#[gpui::test]
4703async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4704 init_test(cx);
4705
4706 let fs = FakeFs::new(cx.executor());
4707 fs.insert_tree(
4708 "/dir",
4709 json!({
4710 "a.ts": "a",
4711 }),
4712 )
4713 .await;
4714
4715 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4716
4717 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4718 language_registry.add(typescript_lang());
4719 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4720 "TypeScript",
4721 FakeLspAdapter {
4722 capabilities: lsp::ServerCapabilities {
4723 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4724 ..lsp::ServerCapabilities::default()
4725 },
4726 ..FakeLspAdapter::default()
4727 },
4728 );
4729
4730 let buffer = project
4731 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4732 .await
4733 .unwrap();
4734 cx.executor().run_until_parked();
4735
4736 let fake_server = fake_language_servers
4737 .next()
4738 .await
4739 .expect("failed to get the language server");
4740
4741 let mut request_handled =
4742 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4743 Ok(Some(lsp::Hover {
4744 contents: lsp::HoverContents::Array(vec![
4745 lsp::MarkedString::String("".to_string()),
4746 lsp::MarkedString::String(" ".to_string()),
4747 lsp::MarkedString::String("\n\n\n".to_string()),
4748 ]),
4749 range: None,
4750 }))
4751 });
4752
4753 let hover_task = project.update(cx, |project, cx| {
4754 project.hover(&buffer, Point::new(0, 0), cx)
4755 });
4756 let () = request_handled
4757 .next()
4758 .await
4759 .expect("All hover requests should have been triggered");
4760 assert_eq!(
4761 Vec::<String>::new(),
4762 hover_task
4763 .await
4764 .into_iter()
4765 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4766 .sorted()
4767 .collect::<Vec<_>>(),
4768 "Empty hover parts should be ignored"
4769 );
4770}
4771
4772#[gpui::test]
4773async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4774 init_test(cx);
4775
4776 let fs = FakeFs::new(cx.executor());
4777 fs.insert_tree(
4778 "/dir",
4779 json!({
4780 "a.tsx": "a",
4781 }),
4782 )
4783 .await;
4784
4785 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4786
4787 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4788 language_registry.add(tsx_lang());
4789 let language_server_names = [
4790 "TypeScriptServer",
4791 "TailwindServer",
4792 "ESLintServer",
4793 "NoActionsCapabilitiesServer",
4794 ];
4795 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4796 "tsx",
4797 true,
4798 FakeLspAdapter {
4799 name: &language_server_names[0],
4800 capabilities: lsp::ServerCapabilities {
4801 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4802 ..lsp::ServerCapabilities::default()
4803 },
4804 ..FakeLspAdapter::default()
4805 },
4806 );
4807 let _a = language_registry.register_specific_fake_lsp_adapter(
4808 "tsx",
4809 false,
4810 FakeLspAdapter {
4811 name: &language_server_names[1],
4812 capabilities: lsp::ServerCapabilities {
4813 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4814 ..lsp::ServerCapabilities::default()
4815 },
4816 ..FakeLspAdapter::default()
4817 },
4818 );
4819 let _b = language_registry.register_specific_fake_lsp_adapter(
4820 "tsx",
4821 false,
4822 FakeLspAdapter {
4823 name: &language_server_names[2],
4824 capabilities: lsp::ServerCapabilities {
4825 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4826 ..lsp::ServerCapabilities::default()
4827 },
4828 ..FakeLspAdapter::default()
4829 },
4830 );
4831 let _c = language_registry.register_specific_fake_lsp_adapter(
4832 "tsx",
4833 false,
4834 FakeLspAdapter {
4835 name: &language_server_names[3],
4836 capabilities: lsp::ServerCapabilities {
4837 code_action_provider: None,
4838 ..lsp::ServerCapabilities::default()
4839 },
4840 ..FakeLspAdapter::default()
4841 },
4842 );
4843
4844 let buffer = project
4845 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4846 .await
4847 .unwrap();
4848 cx.executor().run_until_parked();
4849
4850 let mut servers_with_actions_requests = HashMap::default();
4851 for i in 0..language_server_names.len() {
4852 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4853 panic!(
4854 "Failed to get language server #{i} with name {}",
4855 &language_server_names[i]
4856 )
4857 });
4858 let new_server_name = new_server.server.name();
4859 assert!(
4860 !servers_with_actions_requests.contains_key(new_server_name),
4861 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4862 );
4863 let new_server_name = new_server_name.to_string();
4864 match new_server_name.as_str() {
4865 "TailwindServer" | "TypeScriptServer" => {
4866 servers_with_actions_requests.insert(
4867 new_server_name.clone(),
4868 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4869 move |_, _| {
4870 let name = new_server_name.clone();
4871 async move {
4872 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4873 lsp::CodeAction {
4874 title: format!("{name} code action"),
4875 ..lsp::CodeAction::default()
4876 },
4877 )]))
4878 }
4879 },
4880 ),
4881 );
4882 }
4883 "ESLintServer" => {
4884 servers_with_actions_requests.insert(
4885 new_server_name,
4886 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4887 |_, _| async move { Ok(None) },
4888 ),
4889 );
4890 }
4891 "NoActionsCapabilitiesServer" => {
4892 let _never_handled = new_server
4893 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4894 panic!(
4895 "Should not call for code actions server with no corresponding capabilities"
4896 )
4897 });
4898 }
4899 unexpected => panic!("Unexpected server name: {unexpected}"),
4900 }
4901 }
4902
4903 let code_actions_task = project.update(cx, |project, cx| {
4904 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4905 });
4906 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4907 |mut code_actions_request| async move {
4908 code_actions_request
4909 .next()
4910 .await
4911 .expect("All code actions requests should have been triggered")
4912 },
4913 ))
4914 .await;
4915 assert_eq!(
4916 vec!["TailwindServer code action", "TypeScriptServer code action"],
4917 code_actions_task
4918 .await
4919 .into_iter()
4920 .map(|code_action| code_action.lsp_action.title)
4921 .sorted()
4922 .collect::<Vec<_>>(),
4923 "Should receive code actions responses from all related servers with hover capabilities"
4924 );
4925}
4926
4927async fn search(
4928 project: &Model<Project>,
4929 query: SearchQuery,
4930 cx: &mut gpui::TestAppContext,
4931) -> Result<HashMap<String, Vec<Range<usize>>>> {
4932 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4933 let mut results = HashMap::default();
4934 while let Some(search_result) = search_rx.next().await {
4935 match search_result {
4936 SearchResult::Buffer { buffer, ranges } => {
4937 results.entry(buffer).or_insert(ranges);
4938 }
4939 SearchResult::LimitReached => {}
4940 }
4941 }
4942 Ok(results
4943 .into_iter()
4944 .map(|(buffer, ranges)| {
4945 buffer.update(cx, |buffer, cx| {
4946 let path = buffer
4947 .file()
4948 .unwrap()
4949 .full_path(cx)
4950 .to_string_lossy()
4951 .to_string();
4952 let ranges = ranges
4953 .into_iter()
4954 .map(|range| range.to_offset(buffer))
4955 .collect::<Vec<_>>();
4956 (path, ranges)
4957 })
4958 })
4959 .collect())
4960}
4961
4962fn init_test(cx: &mut gpui::TestAppContext) {
4963 if std::env::var("RUST_LOG").is_ok() {
4964 env_logger::try_init().ok();
4965 }
4966
4967 cx.update(|cx| {
4968 let settings_store = SettingsStore::test(cx);
4969 cx.set_global(settings_store);
4970 release_channel::init("0.0.0", cx);
4971 language::init(cx);
4972 Project::init_settings(cx);
4973 });
4974}
4975
4976fn json_lang() -> Arc<Language> {
4977 Arc::new(Language::new(
4978 LanguageConfig {
4979 name: "JSON".into(),
4980 matcher: LanguageMatcher {
4981 path_suffixes: vec!["json".to_string()],
4982 ..Default::default()
4983 },
4984 ..Default::default()
4985 },
4986 None,
4987 ))
4988}
4989
4990fn js_lang() -> Arc<Language> {
4991 Arc::new(Language::new(
4992 LanguageConfig {
4993 name: Arc::from("JavaScript"),
4994 matcher: LanguageMatcher {
4995 path_suffixes: vec!["js".to_string()],
4996 ..Default::default()
4997 },
4998 ..Default::default()
4999 },
5000 None,
5001 ))
5002}
5003
5004fn rust_lang() -> Arc<Language> {
5005 Arc::new(Language::new(
5006 LanguageConfig {
5007 name: "Rust".into(),
5008 matcher: LanguageMatcher {
5009 path_suffixes: vec!["rs".to_string()],
5010 ..Default::default()
5011 },
5012 ..Default::default()
5013 },
5014 Some(tree_sitter_rust::language()),
5015 ))
5016}
5017
5018fn typescript_lang() -> Arc<Language> {
5019 Arc::new(Language::new(
5020 LanguageConfig {
5021 name: "TypeScript".into(),
5022 matcher: LanguageMatcher {
5023 path_suffixes: vec!["ts".to_string()],
5024 ..Default::default()
5025 },
5026 ..Default::default()
5027 },
5028 Some(tree_sitter_typescript::language_typescript()),
5029 ))
5030}
5031
5032fn tsx_lang() -> Arc<Language> {
5033 Arc::new(Language::new(
5034 LanguageConfig {
5035 name: "tsx".into(),
5036 matcher: LanguageMatcher {
5037 path_suffixes: vec!["tsx".to_string()],
5038 ..Default::default()
5039 },
5040 ..Default::default()
5041 },
5042 Some(tree_sitter_typescript::language_tsx()),
5043 ))
5044}