1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, UpdateGlobal};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{TaskContext, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20use worktree::WorktreeModelHandle as _;
21
22#[gpui::test]
23async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
24 cx.executor().allow_parking();
25
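    // Spawn an OS thread that performs blocking filesystem I/O, then signals completion
    // over the channel; the async test simply awaits that signal.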
26 let (tx, mut rx) = futures::channel::mpsc::unbounded();
27 let _thread = std::thread::spawn(move || {
28 std::fs::metadata("/Users").unwrap();
29 std::thread::sleep(Duration::from_millis(1000));
30 tx.unbounded_send(1).unwrap();
31 });
32 rx.next().await.unwrap();
33}
34
35#[gpui::test]
36async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
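    // `smol::unblock` runs the blocking closure on a background thread pool, so the
    // foreground task can await its result without blocking the executor.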
39 let io_task = smol::unblock(move || {
40 println!("sleeping on thread {:?}", std::thread::current().id());
41 std::thread::sleep(Duration::from_millis(10));
42 1
43 });
44
45 let task = cx.foreground_executor().spawn(async move {
46 io_task.await;
47 });
48
49 task.await;
50}
51
52#[cfg(not(windows))]
53#[gpui::test]
54async fn test_symlinks(cx: &mut gpui::TestAppContext) {
55 init_test(cx);
56 cx.executor().allow_parking();
57
58 let dir = temp_tree(json!({
59 "root": {
60 "apple": "",
61 "banana": {
62 "carrot": {
63 "date": "",
64 "endive": "",
65 }
66 },
67 "fennel": {
68 "grape": "",
69 }
70 }
71 }));
72
73 let root_link_path = dir.path().join("root_link");
74 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
75 os::unix::fs::symlink(
76 &dir.path().join("root/fennel"),
77 &dir.path().join("root/finnochio"),
78 )
79 .unwrap();
80
81 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
82
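    // The worktree is rooted at the symlink to `root` and follows the internal
    // `finnochio` symlink, so `fennel/grape` and `finnochio/grape` share an inode.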
83 project.update(cx, |project, cx| {
84 let tree = project.worktrees().next().unwrap().read(cx);
85 assert_eq!(tree.file_count(), 5);
86 assert_eq!(
87 tree.inode_for_path("fennel/grape"),
88 tree.inode_for_path("finnochio/grape")
89 );
90 });
91}
92
93#[gpui::test]
94async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
95 init_test(cx);
96
97 let fs = FakeFs::new(cx.executor());
98 fs.insert_tree(
99 "/the-root",
100 json!({
101 ".zed": {
102 "settings.json": r#"{ "tab_size": 8 }"#,
103 "tasks.json": r#"[{
104 "label": "cargo check",
105 "command": "cargo",
106 "args": ["check", "--all"]
107 },]"#,
108 },
109 "a": {
110 "a.rs": "fn a() {\n A\n}"
111 },
112 "b": {
113 ".zed": {
114 "settings.json": r#"{ "tab_size": 2 }"#,
115 "tasks.json": r#"[{
116 "label": "cargo check",
117 "command": "cargo",
118 "args": ["check"]
119 },]"#,
120 },
121 "b.rs": "fn b() {\n B\n}"
122 }
123 }),
124 )
125 .await;
126
127 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
128 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
129 let task_context = TaskContext::default();
130
131 cx.executor().run_until_parked();
132 let worktree_id = cx.update(|cx| {
133 project.update(cx, |project, cx| {
134 project.worktrees().next().unwrap().read(cx).id()
135 })
136 });
137 let global_task_source_kind = TaskSourceKind::Worktree {
138 id: worktree_id,
139 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
140 id_base: "local_tasks_for_worktree",
141 };
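    // The root `.zed` settings apply to `a/a.rs`, while the nested `b/.zed` settings
    // override them for `b/b.rs`; both `tasks.json` files contribute a task.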
142 cx.update(|cx| {
143 let tree = worktree.read(cx);
144
145 let settings_a = language_settings(
146 None,
147 Some(
148 &(File::for_entry(
149 tree.entry_for_path("a/a.rs").unwrap().clone(),
150 worktree.clone(),
151 ) as _),
152 ),
153 cx,
154 );
155 let settings_b = language_settings(
156 None,
157 Some(
158 &(File::for_entry(
159 tree.entry_for_path("b/b.rs").unwrap().clone(),
160 worktree.clone(),
161 ) as _),
162 ),
163 cx,
164 );
165
166 assert_eq!(settings_a.tab_size.get(), 8);
167 assert_eq!(settings_b.tab_size.get(), 2);
168
169 let all_tasks = project
170 .update(cx, |project, cx| {
171 project.task_inventory().update(cx, |inventory, _| {
172 let (mut old, new) = inventory.used_and_current_resolved_tasks(
173 None,
174 Some(worktree_id),
175 &task_context,
176 );
177 old.extend(new);
178 old
179 })
180 })
181 .into_iter()
182 .map(|(source_kind, task)| {
183 let resolved = task.resolved.unwrap();
184 (
185 source_kind,
186 task.resolved_label,
187 resolved.args,
188 resolved.env,
189 )
190 })
191 .collect::<Vec<_>>();
192 assert_eq!(
193 all_tasks,
194 vec![
195 (
196 global_task_source_kind.clone(),
197 "cargo check".to_string(),
198 vec!["check".to_string(), "--all".to_string()],
199 HashMap::default(),
200 ),
201 (
202 TaskSourceKind::Worktree {
203 id: worktree_id,
204 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
205 id_base: "local_tasks_for_worktree",
206 },
207 "cargo check".to_string(),
208 vec!["check".to_string()],
209 HashMap::default(),
210 ),
211 ]
212 );
213 });
214
215 project.update(cx, |project, cx| {
216 let inventory = project.task_inventory();
217 inventory.update(cx, |inventory, _| {
218 let (mut old, new) =
219 inventory.used_and_current_resolved_tasks(None, Some(worktree_id), &task_context);
220 old.extend(new);
221 let (_, resolved_task) = old
222 .into_iter()
223 .find(|(source_kind, _)| source_kind == &global_task_source_kind)
224 .expect("should have one global task");
225 inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
226 })
227 });
228
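    // Replace the global tasks.json source with an updated template that adds
    // `--all-targets` and sets RUSTFLAGS; the resolved tasks should pick up the change.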
229 let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
230 label: "cargo check".to_string(),
231 command: "cargo".to_string(),
232 args: vec![
233 "check".to_string(),
234 "--all".to_string(),
235 "--all-targets".to_string(),
236 ],
237 env: HashMap::from_iter(Some((
238 "RUSTFLAGS".to_string(),
239 "-Zunstable-options".to_string(),
240 ))),
241 ..TaskTemplate::default()
242 }]))
243 .unwrap();
244 let (tx, rx) = futures::channel::mpsc::unbounded();
245 cx.update(|cx| {
246 project.update(cx, |project, cx| {
247 project.task_inventory().update(cx, |inventory, cx| {
248 inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
249 inventory.add_source(
250 global_task_source_kind.clone(),
251 |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
252 cx,
253 );
254 });
255 })
256 });
257 tx.unbounded_send(tasks).unwrap();
258
259 cx.run_until_parked();
260 cx.update(|cx| {
261 let all_tasks = project
262 .update(cx, |project, cx| {
263 project.task_inventory().update(cx, |inventory, _| {
264 let (mut old, new) = inventory.used_and_current_resolved_tasks(
265 None,
266 Some(worktree_id),
267 &task_context,
268 );
269 old.extend(new);
270 old
271 })
272 })
273 .into_iter()
274 .map(|(source_kind, task)| {
275 let resolved = task.resolved.unwrap();
276 (
277 source_kind,
278 task.resolved_label,
279 resolved.args,
280 resolved.env,
281 )
282 })
283 .collect::<Vec<_>>();
284 assert_eq!(
285 all_tasks,
286 vec![
287 (
288 TaskSourceKind::Worktree {
289 id: worktree_id,
290 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
291 id_base: "local_tasks_for_worktree",
292 },
293 "cargo check".to_string(),
294 vec![
295 "check".to_string(),
296 "--all".to_string(),
297 "--all-targets".to_string()
298 ],
299 HashMap::from_iter(Some((
300 "RUSTFLAGS".to_string(),
301 "-Zunstable-options".to_string()
302 ))),
303 ),
304 (
305 TaskSourceKind::Worktree {
306 id: worktree_id,
307 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
308 id_base: "local_tasks_for_worktree",
309 },
310 "cargo check".to_string(),
311 vec!["check".to_string()],
312 HashMap::default(),
313 ),
314 ]
315 );
316 });
317}
318
319#[gpui::test]
320async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
321 init_test(cx);
322
323 let fs = FakeFs::new(cx.executor());
324 fs.insert_tree(
325 "/the-root",
326 json!({
327 "test.rs": "const A: i32 = 1;",
328 "test2.rs": "",
329 "Cargo.toml": "a = 1",
330 "package.json": "{\"a\": 1}",
331 }),
332 )
333 .await;
334
335 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
336 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
337
338 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
339 "Rust",
340 FakeLspAdapter {
341 name: "the-rust-language-server",
342 capabilities: lsp::ServerCapabilities {
343 completion_provider: Some(lsp::CompletionOptions {
344 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
345 ..Default::default()
346 }),
347 ..Default::default()
348 },
349 ..Default::default()
350 },
351 );
352 let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
353 "JSON",
354 FakeLspAdapter {
355 name: "the-json-language-server",
356 capabilities: lsp::ServerCapabilities {
357 completion_provider: Some(lsp::CompletionOptions {
358 trigger_characters: Some(vec![":".to_string()]),
359 ..Default::default()
360 }),
361 ..Default::default()
362 },
363 ..Default::default()
364 },
365 );
366
367 // Open a buffer without an associated language server.
368 let toml_buffer = project
369 .update(cx, |project, cx| {
370 project.open_local_buffer("/the-root/Cargo.toml", cx)
371 })
372 .await
373 .unwrap();
374
375 // Open a buffer with an associated language server before the language for it has been loaded.
376 let rust_buffer = project
377 .update(cx, |project, cx| {
378 project.open_local_buffer("/the-root/test.rs", cx)
379 })
380 .await
381 .unwrap();
382 rust_buffer.update(cx, |buffer, _| {
383 assert_eq!(buffer.language().map(|l| l.name()), None);
384 });
385
386 // Now we add the languages to the project, and ensure they get assigned to all
387 // the relevant open buffers.
388 language_registry.add(json_lang());
389 language_registry.add(rust_lang());
390 cx.executor().run_until_parked();
391 rust_buffer.update(cx, |buffer, _| {
392 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
393 });
394
395 // A server is started up, and it is notified about Rust files.
396 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
397 assert_eq!(
398 fake_rust_server
399 .receive_notification::<lsp::notification::DidOpenTextDocument>()
400 .await
401 .text_document,
402 lsp::TextDocumentItem {
403 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
404 version: 0,
405 text: "const A: i32 = 1;".to_string(),
406 language_id: "rust".to_string(),
407 }
408 );
409
410 // The buffer is configured based on the language server's capabilities.
411 rust_buffer.update(cx, |buffer, _| {
412 assert_eq!(
413 buffer.completion_triggers(),
414 &[".".to_string(), "::".to_string()]
415 );
416 });
417 toml_buffer.update(cx, |buffer, _| {
418 assert!(buffer.completion_triggers().is_empty());
419 });
420
421 // Edit a buffer. The changes are reported to the language server.
422 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
423 assert_eq!(
424 fake_rust_server
425 .receive_notification::<lsp::notification::DidChangeTextDocument>()
426 .await
427 .text_document,
428 lsp::VersionedTextDocumentIdentifier::new(
429 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
430 1
431 )
432 );
433
434 // Open a third buffer with a different associated language server.
435 let json_buffer = project
436 .update(cx, |project, cx| {
437 project.open_local_buffer("/the-root/package.json", cx)
438 })
439 .await
440 .unwrap();
441
442 // A JSON language server is started up and is notified only about the JSON buffer.
443 let mut fake_json_server = fake_json_servers.next().await.unwrap();
444 assert_eq!(
445 fake_json_server
446 .receive_notification::<lsp::notification::DidOpenTextDocument>()
447 .await
448 .text_document,
449 lsp::TextDocumentItem {
450 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
451 version: 0,
452 text: "{\"a\": 1}".to_string(),
453 language_id: "json".to_string(),
454 }
455 );
456
457 // This buffer is configured based on the second language server's
458 // capabilities.
459 json_buffer.update(cx, |buffer, _| {
460 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
461 });
462
463 // When opening another buffer whose language server is already running,
464 // it is also configured based on the existing language server's capabilities.
465 let rust_buffer2 = project
466 .update(cx, |project, cx| {
467 project.open_local_buffer("/the-root/test2.rs", cx)
468 })
469 .await
470 .unwrap();
471 rust_buffer2.update(cx, |buffer, _| {
472 assert_eq!(
473 buffer.completion_triggers(),
474 &[".".to_string(), "::".to_string()]
475 );
476 });
477
478 // Changes are reported only to servers matching the buffer's language.
479 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
480 rust_buffer2.update(cx, |buffer, cx| {
481 buffer.edit([(0..0, "let x = 1;")], None, cx)
482 });
483 assert_eq!(
484 fake_rust_server
485 .receive_notification::<lsp::notification::DidChangeTextDocument>()
486 .await
487 .text_document,
488 lsp::VersionedTextDocumentIdentifier::new(
489 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
490 1
491 )
492 );
493
494 // Save notifications are reported to all servers.
495 project
496 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
497 .await
498 .unwrap();
499 assert_eq!(
500 fake_rust_server
501 .receive_notification::<lsp::notification::DidSaveTextDocument>()
502 .await
503 .text_document,
504 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
505 );
506 assert_eq!(
507 fake_json_server
508 .receive_notification::<lsp::notification::DidSaveTextDocument>()
509 .await
510 .text_document,
511 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
512 );
513
514 // Renames are reported only to servers matching the buffer's language.
515 fs.rename(
516 Path::new("/the-root/test2.rs"),
517 Path::new("/the-root/test3.rs"),
518 Default::default(),
519 )
520 .await
521 .unwrap();
522 assert_eq!(
523 fake_rust_server
524 .receive_notification::<lsp::notification::DidCloseTextDocument>()
525 .await
526 .text_document,
527 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
528 );
529 assert_eq!(
530 fake_rust_server
531 .receive_notification::<lsp::notification::DidOpenTextDocument>()
532 .await
533 .text_document,
534 lsp::TextDocumentItem {
535 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
536 version: 0,
537 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
538 language_id: "rust".to_string(),
539 },
540 );
541
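    // Attach a diagnostic to the buffer so we can verify below that it is cleared
    // when the file's extension (and therefore its language) changes.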
542 rust_buffer2.update(cx, |buffer, cx| {
543 buffer.update_diagnostics(
544 LanguageServerId(0),
545 DiagnosticSet::from_sorted_entries(
546 vec![DiagnosticEntry {
547 diagnostic: Default::default(),
548 range: Anchor::MIN..Anchor::MAX,
549 }],
550 &buffer.snapshot(),
551 ),
552 cx,
553 );
554 assert_eq!(
555 buffer
556 .snapshot()
557 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
558 .count(),
559 1
560 );
561 });
562
563 // When the rename changes the extension of the file, the buffer gets closed on the old
564 // language server and gets opened on the new one.
565 fs.rename(
566 Path::new("/the-root/test3.rs"),
567 Path::new("/the-root/test3.json"),
568 Default::default(),
569 )
570 .await
571 .unwrap();
572 assert_eq!(
573 fake_rust_server
574 .receive_notification::<lsp::notification::DidCloseTextDocument>()
575 .await
576 .text_document,
577 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
578 );
579 assert_eq!(
580 fake_json_server
581 .receive_notification::<lsp::notification::DidOpenTextDocument>()
582 .await
583 .text_document,
584 lsp::TextDocumentItem {
585 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
586 version: 0,
587 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
588 language_id: "json".to_string(),
589 },
590 );
591
592 // The diagnostics are cleared, since the buffer's language has changed.
593 rust_buffer2.update(cx, |buffer, _| {
594 assert_eq!(
595 buffer
596 .snapshot()
597 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
598 .count(),
599 0
600 );
601 });
602
603 // The renamed file's version resets after switching to a different language server.
604 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
605 assert_eq!(
606 fake_json_server
607 .receive_notification::<lsp::notification::DidChangeTextDocument>()
608 .await
609 .text_document,
610 lsp::VersionedTextDocumentIdentifier::new(
611 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
612 1
613 )
614 );
615
616 // Restart language servers
617 project.update(cx, |project, cx| {
618 project.restart_language_servers_for_buffers(
619 vec![rust_buffer.clone(), json_buffer.clone()],
620 cx,
621 );
622 });
623
624 let mut rust_shutdown_requests = fake_rust_server
625 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
626 let mut json_shutdown_requests = fake_json_server
627 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
628 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
629
630 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
631 let mut fake_json_server = fake_json_servers.next().await.unwrap();
632
633 // Ensure the Rust document is reopened in the new Rust language server
634 assert_eq!(
635 fake_rust_server
636 .receive_notification::<lsp::notification::DidOpenTextDocument>()
637 .await
638 .text_document,
639 lsp::TextDocumentItem {
640 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
641 version: 0,
642 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
643 language_id: "rust".to_string(),
644 }
645 );
646
647 // Ensure the JSON documents are reopened in the new JSON language server
648 assert_set_eq!(
649 [
650 fake_json_server
651 .receive_notification::<lsp::notification::DidOpenTextDocument>()
652 .await
653 .text_document,
654 fake_json_server
655 .receive_notification::<lsp::notification::DidOpenTextDocument>()
656 .await
657 .text_document,
658 ],
659 [
660 lsp::TextDocumentItem {
661 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
662 version: 0,
663 text: json_buffer.update(cx, |buffer, _| buffer.text()),
664 language_id: "json".to_string(),
665 },
666 lsp::TextDocumentItem {
667 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
668 version: 0,
669 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
670 language_id: "json".to_string(),
671 }
672 ]
673 );
674
675 // Close notifications are reported only to servers matching the buffer's language.
676 cx.update(|_| drop(json_buffer));
677 let close_message = lsp::DidCloseTextDocumentParams {
678 text_document: lsp::TextDocumentIdentifier::new(
679 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
680 ),
681 };
682 assert_eq!(
683 fake_json_server
684 .receive_notification::<lsp::notification::DidCloseTextDocument>()
685 .await,
686 close_message,
687 );
688}
689
690#[gpui::test]
691async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
692 init_test(cx);
693
694 let fs = FakeFs::new(cx.executor());
695 fs.insert_tree(
696 "/the-root",
697 json!({
698 ".gitignore": "target\n",
699 "src": {
700 "a.rs": "",
701 "b.rs": "",
702 },
703 "target": {
704 "x": {
705 "out": {
706 "x.rs": ""
707 }
708 },
709 "y": {
710 "out": {
711 "y.rs": "",
712 }
713 },
714 "z": {
715 "out": {
716 "z.rs": ""
717 }
718 }
719 }
720 }),
721 )
722 .await;
723
724 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
725 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
726 language_registry.add(rust_lang());
727 let mut fake_servers = language_registry.register_fake_lsp_adapter(
728 "Rust",
729 FakeLspAdapter {
730 name: "the-language-server",
731 ..Default::default()
732 },
733 );
734
735 cx.executor().run_until_parked();
736
737 // Start the language server by opening a buffer with a compatible file extension.
738 let _buffer = project
739 .update(cx, |project, cx| {
740 project.open_local_buffer("/the-root/src/a.rs", cx)
741 })
742 .await
743 .unwrap();
744
745 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
746 project.update(cx, |project, cx| {
747 let worktree = project.worktrees().next().unwrap();
748 assert_eq!(
749 worktree
750 .read(cx)
751 .snapshot()
752 .entries(true)
753 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
754 .collect::<Vec<_>>(),
755 &[
756 (Path::new(""), false),
757 (Path::new(".gitignore"), false),
758 (Path::new("src"), false),
759 (Path::new("src/a.rs"), false),
760 (Path::new("src/b.rs"), false),
761 (Path::new("target"), true),
762 ]
763 );
764 });
765
766 let prev_read_dir_count = fs.read_dir_call_count();
767
768 // Keep track of the FS events reported to the language server.
769 let fake_server = fake_servers.next().await.unwrap();
770 let file_changes = Arc::new(Mutex::new(Vec::new()));
771 fake_server
772 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
773 registrations: vec![lsp::Registration {
774 id: Default::default(),
775 method: "workspace/didChangeWatchedFiles".to_string(),
776 register_options: serde_json::to_value(
777 lsp::DidChangeWatchedFilesRegistrationOptions {
778 watchers: vec![
779 lsp::FileSystemWatcher {
780 glob_pattern: lsp::GlobPattern::String(
781 "/the-root/Cargo.toml".to_string(),
782 ),
783 kind: None,
784 },
785 lsp::FileSystemWatcher {
786 glob_pattern: lsp::GlobPattern::String(
787 "/the-root/src/*.{rs,c}".to_string(),
788 ),
789 kind: None,
790 },
791 lsp::FileSystemWatcher {
792 glob_pattern: lsp::GlobPattern::String(
793 "/the-root/target/y/**/*.rs".to_string(),
794 ),
795 kind: None,
796 },
797 ],
798 },
799 )
800 .ok(),
801 }],
802 })
803 .await
804 .unwrap();
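    // Collect the `workspace/didChangeWatchedFiles` notifications the server receives
    // so we can assert on them below.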
805 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
806 let file_changes = file_changes.clone();
807 move |params, _| {
808 let mut file_changes = file_changes.lock();
809 file_changes.extend(params.changes);
810 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
811 }
812 });
813
814 cx.executor().run_until_parked();
815 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
816 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
817
818 // Now the language server has asked us to watch an ignored directory path,
819 // so we recursively load it.
820 project.update(cx, |project, cx| {
821 let worktree = project.worktrees().next().unwrap();
822 assert_eq!(
823 worktree
824 .read(cx)
825 .snapshot()
826 .entries(true)
827 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
828 .collect::<Vec<_>>(),
829 &[
830 (Path::new(""), false),
831 (Path::new(".gitignore"), false),
832 (Path::new("src"), false),
833 (Path::new("src/a.rs"), false),
834 (Path::new("src/b.rs"), false),
835 (Path::new("target"), true),
836 (Path::new("target/x"), true),
837 (Path::new("target/y"), true),
838 (Path::new("target/y/out"), true),
839 (Path::new("target/y/out/y.rs"), true),
840 (Path::new("target/z"), true),
841 ]
842 );
843 });
844
845 // Perform some file system mutations, three of which match the watched patterns
846 // and two of which do not.
847 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
848 .await
849 .unwrap();
850 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
851 .await
852 .unwrap();
853 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
854 .await
855 .unwrap();
856 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
857 .await
858 .unwrap();
859 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
860 .await
861 .unwrap();
862
863 // The language server receives events for the FS mutations that match its watch patterns.
864 cx.executor().run_until_parked();
865 assert_eq!(
866 &*file_changes.lock(),
867 &[
868 lsp::FileEvent {
869 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
870 typ: lsp::FileChangeType::DELETED,
871 },
872 lsp::FileEvent {
873 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
874 typ: lsp::FileChangeType::CREATED,
875 },
876 lsp::FileEvent {
877 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
878 typ: lsp::FileChangeType::CREATED,
879 },
880 ]
881 );
882}
883
884#[gpui::test]
885async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
886 init_test(cx);
887
888 let fs = FakeFs::new(cx.executor());
889 fs.insert_tree(
890 "/dir",
891 json!({
892 "a.rs": "let a = 1;",
893 "b.rs": "let b = 2;"
894 }),
895 )
896 .await;
897
898 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
899
900 let buffer_a = project
901 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
902 .await
903 .unwrap();
904 let buffer_b = project
905 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
906 .await
907 .unwrap();
908
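    // Publish one diagnostic per single-file worktree; each should appear only in its
    // own buffer.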
909 project.update(cx, |project, cx| {
910 project
911 .update_diagnostics(
912 LanguageServerId(0),
913 lsp::PublishDiagnosticsParams {
914 uri: Url::from_file_path("/dir/a.rs").unwrap(),
915 version: None,
916 diagnostics: vec![lsp::Diagnostic {
917 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
918 severity: Some(lsp::DiagnosticSeverity::ERROR),
919 message: "error 1".to_string(),
920 ..Default::default()
921 }],
922 },
923 &[],
924 cx,
925 )
926 .unwrap();
927 project
928 .update_diagnostics(
929 LanguageServerId(0),
930 lsp::PublishDiagnosticsParams {
931 uri: Url::from_file_path("/dir/b.rs").unwrap(),
932 version: None,
933 diagnostics: vec![lsp::Diagnostic {
934 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
935 severity: Some(lsp::DiagnosticSeverity::WARNING),
936 message: "error 2".to_string(),
937 ..Default::default()
938 }],
939 },
940 &[],
941 cx,
942 )
943 .unwrap();
944 });
945
946 buffer_a.update(cx, |buffer, _| {
947 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
948 assert_eq!(
949 chunks
950 .iter()
951 .map(|(s, d)| (s.as_str(), *d))
952 .collect::<Vec<_>>(),
953 &[
954 ("let ", None),
955 ("a", Some(DiagnosticSeverity::ERROR)),
956 (" = 1;", None),
957 ]
958 );
959 });
960 buffer_b.update(cx, |buffer, _| {
961 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
962 assert_eq!(
963 chunks
964 .iter()
965 .map(|(s, d)| (s.as_str(), *d))
966 .collect::<Vec<_>>(),
967 &[
968 ("let ", None),
969 ("b", Some(DiagnosticSeverity::WARNING)),
970 (" = 2;", None),
971 ]
972 );
973 });
974}
975
976#[gpui::test]
977async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
978 init_test(cx);
979
980 let fs = FakeFs::new(cx.executor());
981 fs.insert_tree(
982 "/root",
983 json!({
984 "dir": {
985 ".git": {
986 "HEAD": "ref: refs/heads/main",
987 },
988 ".gitignore": "b.rs",
989 "a.rs": "let a = 1;",
990 "b.rs": "let b = 2;",
991 },
992 "other.rs": "let b = c;"
993 }),
994 )
995 .await;
996
997 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
998 let (worktree, _) = project
999 .update(cx, |project, cx| {
1000 project.find_or_create_local_worktree("/root/dir", true, cx)
1001 })
1002 .await
1003 .unwrap();
1004 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1005
1006 let (worktree, _) = project
1007 .update(cx, |project, cx| {
1008 project.find_or_create_local_worktree("/root/other.rs", false, cx)
1009 })
1010 .await
1011 .unwrap();
1012 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1013
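    // Report diagnostics for a gitignored file in the visible worktree and for the
    // non-visible single-file worktree.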
1014 let server_id = LanguageServerId(0);
1015 project.update(cx, |project, cx| {
1016 project
1017 .update_diagnostics(
1018 server_id,
1019 lsp::PublishDiagnosticsParams {
1020 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1021 version: None,
1022 diagnostics: vec![lsp::Diagnostic {
1023 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1024 severity: Some(lsp::DiagnosticSeverity::ERROR),
1025 message: "unused variable 'b'".to_string(),
1026 ..Default::default()
1027 }],
1028 },
1029 &[],
1030 cx,
1031 )
1032 .unwrap();
1033 project
1034 .update_diagnostics(
1035 server_id,
1036 lsp::PublishDiagnosticsParams {
1037 uri: Url::from_file_path("/root/other.rs").unwrap(),
1038 version: None,
1039 diagnostics: vec![lsp::Diagnostic {
1040 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1041 severity: Some(lsp::DiagnosticSeverity::ERROR),
1042 message: "unknown variable 'c'".to_string(),
1043 ..Default::default()
1044 }],
1045 },
1046 &[],
1047 cx,
1048 )
1049 .unwrap();
1050 });
1051
1052 let main_ignored_buffer = project
1053 .update(cx, |project, cx| {
1054 project.open_buffer((main_worktree_id, "b.rs"), cx)
1055 })
1056 .await
1057 .unwrap();
1058 main_ignored_buffer.update(cx, |buffer, _| {
1059 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1060 assert_eq!(
1061 chunks
1062 .iter()
1063 .map(|(s, d)| (s.as_str(), *d))
1064 .collect::<Vec<_>>(),
1065 &[
1066 ("let ", None),
1067 ("b", Some(DiagnosticSeverity::ERROR)),
1068 (" = 2;", None),
1069 ],
1070 "Gigitnored buffers should still get in-buffer diagnostics",
1071 );
1072 });
1073 let other_buffer = project
1074 .update(cx, |project, cx| {
1075 project.open_buffer((other_worktree_id, ""), cx)
1076 })
1077 .await
1078 .unwrap();
1079 other_buffer.update(cx, |buffer, _| {
1080 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1081 assert_eq!(
1082 chunks
1083 .iter()
1084 .map(|(s, d)| (s.as_str(), *d))
1085 .collect::<Vec<_>>(),
1086 &[
1087 ("let b = ", None),
1088 ("c", Some(DiagnosticSeverity::ERROR)),
1089 (";", None),
1090 ],
1091 "Buffers from hidden projects should still get in-buffer diagnostics"
1092 );
1093 });
1094
1095 project.update(cx, |project, cx| {
1096 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1097 assert_eq!(
1098 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1099 vec![(
1100 ProjectPath {
1101 worktree_id: main_worktree_id,
1102 path: Arc::from(Path::new("b.rs")),
1103 },
1104 server_id,
1105 DiagnosticSummary {
1106 error_count: 1,
1107 warning_count: 0,
1108 }
1109 )]
1110 );
1111 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1112 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1113 });
1114}
1115
1116#[gpui::test]
1117async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
1118 init_test(cx);
1119
1120 let progress_token = "the-progress-token";
1121
1122 let fs = FakeFs::new(cx.executor());
1123 fs.insert_tree(
1124 "/dir",
1125 json!({
1126 "a.rs": "fn a() { A }",
1127 "b.rs": "const y: i32 = 1",
1128 }),
1129 )
1130 .await;
1131
1132 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1133 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1134
1135 language_registry.add(rust_lang());
1136 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1137 "Rust",
1138 FakeLspAdapter {
1139 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1140 disk_based_diagnostics_sources: vec!["disk".into()],
1141 ..Default::default()
1142 },
1143 );
1144
1145 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1146
1147 // Cause the worktree to start the fake language server
1148 let _buffer = project
1149 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1150 .await
1151 .unwrap();
1152
1153 let mut events = cx.events(&project);
1154
1155 let fake_server = fake_servers.next().await.unwrap();
1156 assert_eq!(
1157 events.next().await.unwrap(),
1158 Event::LanguageServerAdded(LanguageServerId(0)),
1159 );
1160
1161 fake_server
1162 .start_progress(format!("{}/0", progress_token))
1163 .await;
1164 assert_eq!(
1165 events.next().await.unwrap(),
1166 Event::DiskBasedDiagnosticsStarted {
1167 language_server_id: LanguageServerId(0),
1168 }
1169 );
1170
1171 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1172 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1173 version: None,
1174 diagnostics: vec![lsp::Diagnostic {
1175 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1176 severity: Some(lsp::DiagnosticSeverity::ERROR),
1177 message: "undefined variable 'A'".to_string(),
1178 ..Default::default()
1179 }],
1180 });
1181 assert_eq!(
1182 events.next().await.unwrap(),
1183 Event::DiagnosticsUpdated {
1184 language_server_id: LanguageServerId(0),
1185 path: (worktree_id, Path::new("a.rs")).into()
1186 }
1187 );
1188
1189 fake_server.end_progress(format!("{}/0", progress_token));
1190 assert_eq!(
1191 events.next().await.unwrap(),
1192 Event::DiskBasedDiagnosticsFinished {
1193 language_server_id: LanguageServerId(0)
1194 }
1195 );
1196
1197 let buffer = project
1198 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1199 .await
1200 .unwrap();
1201
1202 buffer.update(cx, |buffer, _| {
1203 let snapshot = buffer.snapshot();
1204 let diagnostics = snapshot
1205 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1206 .collect::<Vec<_>>();
1207 assert_eq!(
1208 diagnostics,
1209 &[DiagnosticEntry {
1210 range: Point::new(0, 9)..Point::new(0, 10),
1211 diagnostic: Diagnostic {
1212 severity: lsp::DiagnosticSeverity::ERROR,
1213 message: "undefined variable 'A'".to_string(),
1214 group_id: 0,
1215 is_primary: true,
1216 ..Default::default()
1217 }
1218 }]
1219 )
1220 });
1221
1222 // Ensure publishing empty diagnostics twice only results in one update event.
1223 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1224 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1225 version: None,
1226 diagnostics: Default::default(),
1227 });
1228 assert_eq!(
1229 events.next().await.unwrap(),
1230 Event::DiagnosticsUpdated {
1231 language_server_id: LanguageServerId(0),
1232 path: (worktree_id, Path::new("a.rs")).into()
1233 }
1234 );
1235
1236 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1237 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1238 version: None,
1239 diagnostics: Default::default(),
1240 });
1241 cx.executor().run_until_parked();
1242 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1243}
1244
1245#[gpui::test]
1246async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1247 init_test(cx);
1248
1249 let progress_token = "the-progress-token";
1250
1251 let fs = FakeFs::new(cx.executor());
1252 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1253
1254 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1255
1256 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1257 language_registry.add(rust_lang());
1258 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1259 "Rust",
1260 FakeLspAdapter {
1261 name: "the-language-server",
1262 disk_based_diagnostics_sources: vec!["disk".into()],
1263 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1264 ..Default::default()
1265 },
1266 );
1267
1268 let buffer = project
1269 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1270 .await
1271 .unwrap();
1272
1273 // Simulate diagnostics starting to update.
1274 let fake_server = fake_servers.next().await.unwrap();
1275 fake_server.start_progress(progress_token).await;
1276
1277 // Restart the server before the diagnostics finish updating.
1278 project.update(cx, |project, cx| {
1279 project.restart_language_servers_for_buffers([buffer], cx);
1280 });
1281 let mut events = cx.events(&project);
1282
1283 // Simulate the newly started server sending more diagnostics.
1284 let fake_server = fake_servers.next().await.unwrap();
1285 assert_eq!(
1286 events.next().await.unwrap(),
1287 Event::LanguageServerAdded(LanguageServerId(1))
1288 );
1289 fake_server.start_progress(progress_token).await;
1290 assert_eq!(
1291 events.next().await.unwrap(),
1292 Event::DiskBasedDiagnosticsStarted {
1293 language_server_id: LanguageServerId(1)
1294 }
1295 );
1296 project.update(cx, |project, _| {
1297 assert_eq!(
1298 project
1299 .language_servers_running_disk_based_diagnostics()
1300 .collect::<Vec<_>>(),
1301 [LanguageServerId(1)]
1302 );
1303 });
1304
1305 // All diagnostics are considered done, despite the old server's diagnostic
1306 // task never completing.
1307 fake_server.end_progress(progress_token);
1308 assert_eq!(
1309 events.next().await.unwrap(),
1310 Event::DiskBasedDiagnosticsFinished {
1311 language_server_id: LanguageServerId(1)
1312 }
1313 );
1314 project.update(cx, |project, _| {
1315 assert_eq!(
1316 project
1317 .language_servers_running_disk_based_diagnostics()
1318 .collect::<Vec<_>>(),
1319 [LanguageServerId(0); 0]
1320 );
1321 });
1322}
1323
1324#[gpui::test]
1325async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1326 init_test(cx);
1327
1328 let fs = FakeFs::new(cx.executor());
1329 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1330
1331 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1332
1333 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1334 language_registry.add(rust_lang());
1335 let mut fake_servers =
1336 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1337
1338 let buffer = project
1339 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1340 .await
1341 .unwrap();
1342
1343 // Publish diagnostics
1344 let fake_server = fake_servers.next().await.unwrap();
1345 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1346 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1347 version: None,
1348 diagnostics: vec![lsp::Diagnostic {
1349 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1350 severity: Some(lsp::DiagnosticSeverity::ERROR),
1351 message: "the message".to_string(),
1352 ..Default::default()
1353 }],
1354 });
1355
1356 cx.executor().run_until_parked();
1357 buffer.update(cx, |buffer, _| {
1358 assert_eq!(
1359 buffer
1360 .snapshot()
1361 .diagnostics_in_range::<_, usize>(0..1, false)
1362 .map(|entry| entry.diagnostic.message.clone())
1363 .collect::<Vec<_>>(),
1364 ["the message".to_string()]
1365 );
1366 });
1367 project.update(cx, |project, cx| {
1368 assert_eq!(
1369 project.diagnostic_summary(false, cx),
1370 DiagnosticSummary {
1371 error_count: 1,
1372 warning_count: 0,
1373 }
1374 );
1375 });
1376
1377 project.update(cx, |project, cx| {
1378 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1379 });
1380
1381 // The diagnostics are cleared.
1382 cx.executor().run_until_parked();
1383 buffer.update(cx, |buffer, _| {
1384 assert_eq!(
1385 buffer
1386 .snapshot()
1387 .diagnostics_in_range::<_, usize>(0..1, false)
1388 .map(|entry| entry.diagnostic.message.clone())
1389 .collect::<Vec<_>>(),
1390 Vec::<String>::new(),
1391 );
1392 });
1393 project.update(cx, |project, cx| {
1394 assert_eq!(
1395 project.diagnostic_summary(false, cx),
1396 DiagnosticSummary {
1397 error_count: 0,
1398 warning_count: 0,
1399 }
1400 );
1401 });
1402}
1403
1404#[gpui::test]
1405async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1406 init_test(cx);
1407
1408 let fs = FakeFs::new(cx.executor());
1409 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1410
1411 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1412 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1413
1414 language_registry.add(rust_lang());
1415 let mut fake_servers =
1416 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1417
1418 let buffer = project
1419 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1420 .await
1421 .unwrap();
1422
1423 // Before restarting the server, report diagnostics with an unknown buffer version.
1424 let fake_server = fake_servers.next().await.unwrap();
1425 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1426 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1427 version: Some(10000),
1428 diagnostics: Vec::new(),
1429 });
1430 cx.executor().run_until_parked();
1431
1432 project.update(cx, |project, cx| {
1433 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1434 });
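    // The restarted server reopens the document with its version reset to 0, despite
    // the bogus version reported earlier.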
1435 let mut fake_server = fake_servers.next().await.unwrap();
1436 let notification = fake_server
1437 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1438 .await
1439 .text_document;
1440 assert_eq!(notification.version, 0);
1441}
1442
1443#[gpui::test]
1444async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1445 init_test(cx);
1446
1447 let fs = FakeFs::new(cx.executor());
1448 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1449 .await;
1450
1451 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1452 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1453
1454 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
1455 "Rust",
1456 FakeLspAdapter {
1457 name: "rust-lsp",
1458 ..Default::default()
1459 },
1460 );
1461 let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
1462 "JavaScript",
1463 FakeLspAdapter {
1464 name: "js-lsp",
1465 ..Default::default()
1466 },
1467 );
1468 language_registry.add(rust_lang());
1469 language_registry.add(js_lang());
1470
1471 let _rs_buffer = project
1472 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1473 .await
1474 .unwrap();
1475 let _js_buffer = project
1476 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1477 .await
1478 .unwrap();
1479
1480 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1481 assert_eq!(
1482 fake_rust_server_1
1483 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1484 .await
1485 .text_document
1486 .uri
1487 .as_str(),
1488 "file:///dir/a.rs"
1489 );
1490
1491 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1492 assert_eq!(
1493 fake_js_server
1494 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1495 .await
1496 .text_document
1497 .uri
1498 .as_str(),
1499 "file:///dir/b.js"
1500 );
1501
1502 // Disable Rust language server, ensuring only that server gets stopped.
1503 cx.update(|cx| {
1504 SettingsStore::update_global(cx, |settings, cx| {
1505 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1506 settings.languages.insert(
1507 Arc::from("Rust"),
1508 LanguageSettingsContent {
1509 enable_language_server: Some(false),
1510 ..Default::default()
1511 },
1512 );
1513 });
1514 })
1515 });
1516 fake_rust_server_1
1517 .receive_notification::<lsp::notification::Exit>()
1518 .await;
1519
1520 // Enable Rust and disable JavaScript language servers, ensuring that the
1521 // former gets started again and that the latter stops.
1522 cx.update(|cx| {
1523 SettingsStore::update_global(cx, |settings, cx| {
1524 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1525 settings.languages.insert(
1526 Arc::from("Rust"),
1527 LanguageSettingsContent {
1528 enable_language_server: Some(true),
1529 ..Default::default()
1530 },
1531 );
1532 settings.languages.insert(
1533 Arc::from("JavaScript"),
1534 LanguageSettingsContent {
1535 enable_language_server: Some(false),
1536 ..Default::default()
1537 },
1538 );
1539 });
1540 })
1541 });
1542 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1543 assert_eq!(
1544 fake_rust_server_2
1545 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1546 .await
1547 .text_document
1548 .uri
1549 .as_str(),
1550 "file:///dir/a.rs"
1551 );
1552 fake_js_server
1553 .receive_notification::<lsp::notification::Exit>()
1554 .await;
1555}
1556
1557#[gpui::test(iterations = 3)]
1558async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1559 init_test(cx);
1560
1561 let text = "
1562 fn a() { A }
1563 fn b() { BB }
1564 fn c() { CCC }
1565 "
1566 .unindent();
1567
1568 let fs = FakeFs::new(cx.executor());
1569 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1570
1571 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1572 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1573
1574 language_registry.add(rust_lang());
1575 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1576 "Rust",
1577 FakeLspAdapter {
1578 disk_based_diagnostics_sources: vec!["disk".into()],
1579 ..Default::default()
1580 },
1581 );
1582
1583 let buffer = project
1584 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1585 .await
1586 .unwrap();
1587
1588 let mut fake_server = fake_servers.next().await.unwrap();
1589 let open_notification = fake_server
1590 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1591 .await;
1592
1593 // Edit the buffer, moving the content down
1594 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1595 let change_notification_1 = fake_server
1596 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1597 .await;
1598 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1599
1600 // Report some diagnostics for the initial version of the buffer
1601 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1602 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1603 version: Some(open_notification.text_document.version),
1604 diagnostics: vec![
1605 lsp::Diagnostic {
1606 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1607 severity: Some(DiagnosticSeverity::ERROR),
1608 message: "undefined variable 'A'".to_string(),
1609 source: Some("disk".to_string()),
1610 ..Default::default()
1611 },
1612 lsp::Diagnostic {
1613 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1614 severity: Some(DiagnosticSeverity::ERROR),
1615 message: "undefined variable 'BB'".to_string(),
1616 source: Some("disk".to_string()),
1617 ..Default::default()
1618 },
1619 lsp::Diagnostic {
1620 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1621 severity: Some(DiagnosticSeverity::ERROR),
1622 source: Some("disk".to_string()),
1623 message: "undefined variable 'CCC'".to_string(),
1624 ..Default::default()
1625 },
1626 ],
1627 });
1628
1629 // The diagnostics have moved down since they were created.
1630 cx.executor().run_until_parked();
1631 buffer.update(cx, |buffer, _| {
1632 assert_eq!(
1633 buffer
1634 .snapshot()
1635 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1636 .collect::<Vec<_>>(),
1637 &[
1638 DiagnosticEntry {
1639 range: Point::new(3, 9)..Point::new(3, 11),
1640 diagnostic: Diagnostic {
1641 source: Some("disk".into()),
1642 severity: DiagnosticSeverity::ERROR,
1643 message: "undefined variable 'BB'".to_string(),
1644 is_disk_based: true,
1645 group_id: 1,
1646 is_primary: true,
1647 ..Default::default()
1648 },
1649 },
1650 DiagnosticEntry {
1651 range: Point::new(4, 9)..Point::new(4, 12),
1652 diagnostic: Diagnostic {
1653 source: Some("disk".into()),
1654 severity: DiagnosticSeverity::ERROR,
1655 message: "undefined variable 'CCC'".to_string(),
1656 is_disk_based: true,
1657 group_id: 2,
1658 is_primary: true,
1659 ..Default::default()
1660 }
1661 }
1662 ]
1663 );
1664 assert_eq!(
1665 chunks_with_diagnostics(buffer, 0..buffer.len()),
1666 [
1667 ("\n\nfn a() { ".to_string(), None),
1668 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1669 (" }\nfn b() { ".to_string(), None),
1670 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1671 (" }\nfn c() { ".to_string(), None),
1672 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1673 (" }\n".to_string(), None),
1674 ]
1675 );
1676 assert_eq!(
1677 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1678 [
1679 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1680 (" }\nfn c() { ".to_string(), None),
1681 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1682 ]
1683 );
1684 });
1685
1686 // Ensure overlapping diagnostics are highlighted correctly.
1687 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1688 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1689 version: Some(open_notification.text_document.version),
1690 diagnostics: vec![
1691 lsp::Diagnostic {
1692 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1693 severity: Some(DiagnosticSeverity::ERROR),
1694 message: "undefined variable 'A'".to_string(),
1695 source: Some("disk".to_string()),
1696 ..Default::default()
1697 },
1698 lsp::Diagnostic {
1699 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1700 severity: Some(DiagnosticSeverity::WARNING),
1701 message: "unreachable statement".to_string(),
1702 source: Some("disk".to_string()),
1703 ..Default::default()
1704 },
1705 ],
1706 });
1707
1708 cx.executor().run_until_parked();
1709 buffer.update(cx, |buffer, _| {
1710 assert_eq!(
1711 buffer
1712 .snapshot()
1713 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1714 .collect::<Vec<_>>(),
1715 &[
1716 DiagnosticEntry {
1717 range: Point::new(2, 9)..Point::new(2, 12),
1718 diagnostic: Diagnostic {
1719 source: Some("disk".into()),
1720 severity: DiagnosticSeverity::WARNING,
1721 message: "unreachable statement".to_string(),
1722 is_disk_based: true,
1723 group_id: 4,
1724 is_primary: true,
1725 ..Default::default()
1726 }
1727 },
1728 DiagnosticEntry {
1729 range: Point::new(2, 9)..Point::new(2, 10),
1730 diagnostic: Diagnostic {
1731 source: Some("disk".into()),
1732 severity: DiagnosticSeverity::ERROR,
1733 message: "undefined variable 'A'".to_string(),
1734 is_disk_based: true,
1735 group_id: 3,
1736 is_primary: true,
1737 ..Default::default()
1738 },
1739 }
1740 ]
1741 );
1742 assert_eq!(
1743 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1744 [
1745 ("fn a() { ".to_string(), None),
1746 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1747 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1748 ("\n".to_string(), None),
1749 ]
1750 );
1751 assert_eq!(
1752 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1753 [
1754 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1755 ("\n".to_string(), None),
1756 ]
1757 );
1758 });
1759
1760 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1761 // changes since the last save.
1762 buffer.update(cx, |buffer, cx| {
1763 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1764 buffer.edit(
1765 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1766 None,
1767 cx,
1768 );
1769 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1770 });
1771 let change_notification_2 = fake_server
1772 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1773 .await;
1774 assert!(
1775 change_notification_2.text_document.version > change_notification_1.text_document.version
1776 );
1777
1778 // Handle out-of-order diagnostics
1779 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1780 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1781 version: Some(change_notification_2.text_document.version),
1782 diagnostics: vec![
1783 lsp::Diagnostic {
1784 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1785 severity: Some(DiagnosticSeverity::ERROR),
1786 message: "undefined variable 'BB'".to_string(),
1787 source: Some("disk".to_string()),
1788 ..Default::default()
1789 },
1790 lsp::Diagnostic {
1791 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1792 severity: Some(DiagnosticSeverity::WARNING),
1793 message: "undefined variable 'A'".to_string(),
1794 source: Some("disk".to_string()),
1795 ..Default::default()
1796 },
1797 ],
1798 });
1799
1800 cx.executor().run_until_parked();
1801 buffer.update(cx, |buffer, _| {
1802 assert_eq!(
1803 buffer
1804 .snapshot()
1805 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1806 .collect::<Vec<_>>(),
1807 &[
1808 DiagnosticEntry {
1809 range: Point::new(2, 21)..Point::new(2, 22),
1810 diagnostic: Diagnostic {
1811 source: Some("disk".into()),
1812 severity: DiagnosticSeverity::WARNING,
1813 message: "undefined variable 'A'".to_string(),
1814 is_disk_based: true,
1815 group_id: 6,
1816 is_primary: true,
1817 ..Default::default()
1818 }
1819 },
1820 DiagnosticEntry {
1821 range: Point::new(3, 9)..Point::new(3, 14),
1822 diagnostic: Diagnostic {
1823 source: Some("disk".into()),
1824 severity: DiagnosticSeverity::ERROR,
1825 message: "undefined variable 'BB'".to_string(),
1826 is_disk_based: true,
1827 group_id: 5,
1828 is_primary: true,
1829 ..Default::default()
1830 },
1831 }
1832 ]
1833 );
1834 });
1835}
1836
1837#[gpui::test]
1838async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1839 init_test(cx);
1840
1841 let text = concat!(
1842 "let one = ;\n", //
1843 "let two = \n",
1844 "let three = 3;\n",
1845 );
1846
1847 let fs = FakeFs::new(cx.executor());
1848 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1849
1850 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1851 let buffer = project
1852 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1853 .await
1854 .unwrap();
1855
1856 project.update(cx, |project, cx| {
1857 project
1858 .update_buffer_diagnostics(
1859 &buffer,
1860 LanguageServerId(0),
1861 None,
1862 vec![
1863 DiagnosticEntry {
1864 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1865 diagnostic: Diagnostic {
1866 severity: DiagnosticSeverity::ERROR,
1867 message: "syntax error 1".to_string(),
1868 ..Default::default()
1869 },
1870 },
1871 DiagnosticEntry {
1872 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1873 diagnostic: Diagnostic {
1874 severity: DiagnosticSeverity::ERROR,
1875 message: "syntax error 2".to_string(),
1876 ..Default::default()
1877 },
1878 },
1879 ],
1880 cx,
1881 )
1882 .unwrap();
1883 });
1884
1885 // An empty range is extended forward to include the following character.
1886 // At the end of a line, an empty range is extended backward to include
1887 // the preceding character.
1888 buffer.update(cx, |buffer, _| {
1889 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1890 assert_eq!(
1891 chunks
1892 .iter()
1893 .map(|(s, d)| (s.as_str(), *d))
1894 .collect::<Vec<_>>(),
1895 &[
1896 ("let one = ", None),
1897 (";", Some(DiagnosticSeverity::ERROR)),
1898 ("\nlet two =", None),
1899 (" ", Some(DiagnosticSeverity::ERROR)),
1900 ("\nlet three = 3;\n", None)
1901 ]
1902 );
1903 });
1904}
1905
1906#[gpui::test]
1907async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1908 init_test(cx);
1909
1910 let fs = FakeFs::new(cx.executor());
1911 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1912 .await;
1913
1914 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1915
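    // Diagnostics published by two different language servers for the same path are
    // counted separately in the project's diagnostic summary.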
1916 project.update(cx, |project, cx| {
1917 project
1918 .update_diagnostic_entries(
1919 LanguageServerId(0),
1920 Path::new("/dir/a.rs").to_owned(),
1921 None,
1922 vec![DiagnosticEntry {
1923 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1924 diagnostic: Diagnostic {
1925 severity: DiagnosticSeverity::ERROR,
1926 is_primary: true,
1927 message: "syntax error a1".to_string(),
1928 ..Default::default()
1929 },
1930 }],
1931 cx,
1932 )
1933 .unwrap();
1934 project
1935 .update_diagnostic_entries(
1936 LanguageServerId(1),
1937 Path::new("/dir/a.rs").to_owned(),
1938 None,
1939 vec![DiagnosticEntry {
1940 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1941 diagnostic: Diagnostic {
1942 severity: DiagnosticSeverity::ERROR,
1943 is_primary: true,
1944 message: "syntax error b1".to_string(),
1945 ..Default::default()
1946 },
1947 }],
1948 cx,
1949 )
1950 .unwrap();
1951
1952 assert_eq!(
1953 project.diagnostic_summary(false, cx),
1954 DiagnosticSummary {
1955 error_count: 2,
1956 warning_count: 0,
1957 }
1958 );
1959 });
1960}
1961
1962#[gpui::test]
1963async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1964 init_test(cx);
1965
1966 let text = "
1967 fn a() {
1968 f1();
1969 }
1970 fn b() {
1971 f2();
1972 }
1973 fn c() {
1974 f3();
1975 }
1976 "
1977 .unindent();
1978
1979 let fs = FakeFs::new(cx.executor());
1980 fs.insert_tree(
1981 "/dir",
1982 json!({
1983 "a.rs": text.clone(),
1984 }),
1985 )
1986 .await;
1987
1988 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1989
1990 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1991 language_registry.add(rust_lang());
1992 let mut fake_servers =
1993 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1994
1995 let buffer = project
1996 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1997 .await
1998 .unwrap();
1999
2000 let mut fake_server = fake_servers.next().await.unwrap();
2001 let lsp_document_version = fake_server
2002 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2003 .await
2004 .text_document
2005 .version;
2006
2007 // Simulate editing the buffer after the language server computes some edits.
2008 buffer.update(cx, |buffer, cx| {
2009 buffer.edit(
2010 [(
2011 Point::new(0, 0)..Point::new(0, 0),
2012 "// above first function\n",
2013 )],
2014 None,
2015 cx,
2016 );
2017 buffer.edit(
2018 [(
2019 Point::new(2, 0)..Point::new(2, 0),
2020 " // inside first function\n",
2021 )],
2022 None,
2023 cx,
2024 );
2025 buffer.edit(
2026 [(
2027 Point::new(6, 4)..Point::new(6, 4),
2028 "// inside second function ",
2029 )],
2030 None,
2031 cx,
2032 );
2033
2034 assert_eq!(
2035 buffer.text(),
2036 "
2037 // above first function
2038 fn a() {
2039 // inside first function
2040 f1();
2041 }
2042 fn b() {
2043 // inside second function f2();
2044 }
2045 fn c() {
2046 f3();
2047 }
2048 "
2049 .unindent()
2050 );
2051 });
2052
2053 let edits = project
2054 .update(cx, |project, cx| {
2055 project.edits_from_lsp(
2056 &buffer,
2057 vec![
2058 // replace body of first function
2059 lsp::TextEdit {
2060 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
2061 new_text: "
2062 fn a() {
2063 f10();
2064 }
2065 "
2066 .unindent(),
2067 },
2068 // edit inside second function
2069 lsp::TextEdit {
2070 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
2071 new_text: "00".into(),
2072 },
2073 // edit inside third function via two distinct edits
2074 lsp::TextEdit {
2075 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
2076 new_text: "4000".into(),
2077 },
2078 lsp::TextEdit {
2079 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
2080 new_text: "".into(),
2081 },
2082 ],
2083 LanguageServerId(0),
2084 Some(lsp_document_version),
2085 cx,
2086 )
2087 })
2088 .await
2089 .unwrap();
2090
2091 buffer.update(cx, |buffer, cx| {
2092 for (range, new_text) in edits {
2093 buffer.edit([(range, new_text)], None, cx);
2094 }
2095 assert_eq!(
2096 buffer.text(),
2097 "
2098 // above first function
2099 fn a() {
2100 // inside first function
2101 f10();
2102 }
2103 fn b() {
2104 // inside second function f200();
2105 }
2106 fn c() {
2107 f4000();
2108 }
2109 "
2110 .unindent()
2111 );
2112 });
2113}
2114
2115#[gpui::test]
2116async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2117 init_test(cx);
2118
2119 let text = "
2120 use a::b;
2121 use a::c;
2122
2123 fn f() {
2124 b();
2125 c();
2126 }
2127 "
2128 .unindent();
2129
2130 let fs = FakeFs::new(cx.executor());
2131 fs.insert_tree(
2132 "/dir",
2133 json!({
2134 "a.rs": text.clone(),
2135 }),
2136 )
2137 .await;
2138
2139 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2140 let buffer = project
2141 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2142 .await
2143 .unwrap();
2144
2145 // Simulate the language server sending us a small edit in the form of a very large diff.
2146 // Rust-analyzer does this when performing a merge-imports code action.
2147 let edits = project
2148 .update(cx, |project, cx| {
2149 project.edits_from_lsp(
2150 &buffer,
2151 [
2152 // Replace the first use statement without editing the semicolon.
2153 lsp::TextEdit {
2154 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2155 new_text: "a::{b, c}".into(),
2156 },
2157 // Reinsert the remainder of the file between the semicolon and the final
2158 // newline of the file.
2159 lsp::TextEdit {
2160 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2161 new_text: "\n\n".into(),
2162 },
2163 lsp::TextEdit {
2164 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2165 new_text: "
2166 fn f() {
2167 b();
2168 c();
2169 }"
2170 .unindent(),
2171 },
2172 // Delete everything after the first newline of the file.
2173 lsp::TextEdit {
2174 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2175 new_text: "".into(),
2176 },
2177 ],
2178 LanguageServerId(0),
2179 None,
2180 cx,
2181 )
2182 })
2183 .await
2184 .unwrap();
2185
2186 buffer.update(cx, |buffer, cx| {
2187 let edits = edits
2188 .into_iter()
2189 .map(|(range, text)| {
2190 (
2191 range.start.to_point(buffer)..range.end.to_point(buffer),
2192 text,
2193 )
2194 })
2195 .collect::<Vec<_>>();
2196
2197 assert_eq!(
2198 edits,
2199 [
2200 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2201 (Point::new(1, 0)..Point::new(2, 0), "".into())
2202 ]
2203 );
2204
2205 for (range, new_text) in edits {
2206 buffer.edit([(range, new_text)], None, cx);
2207 }
2208 assert_eq!(
2209 buffer.text(),
2210 "
2211 use a::{b, c};
2212
2213 fn f() {
2214 b();
2215 c();
2216 }
2217 "
2218 .unindent()
2219 );
2220 });
2221}
2222
2223#[gpui::test]
2224async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2225 init_test(cx);
2226
2227 let text = "
2228 use a::b;
2229 use a::c;
2230
2231 fn f() {
2232 b();
2233 c();
2234 }
2235 "
2236 .unindent();
2237
2238 let fs = FakeFs::new(cx.executor());
2239 fs.insert_tree(
2240 "/dir",
2241 json!({
2242 "a.rs": text.clone(),
2243 }),
2244 )
2245 .await;
2246
2247 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2248 let buffer = project
2249 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2250 .await
2251 .unwrap();
2252
    // Simulate the language server sending us edits out of order, with ranges
    // that are sometimes inverted or that point to invalid locations.
2255 let edits = project
2256 .update(cx, |project, cx| {
2257 project.edits_from_lsp(
2258 &buffer,
2259 [
2260 lsp::TextEdit {
2261 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2262 new_text: "\n\n".into(),
2263 },
2264 lsp::TextEdit {
2265 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2266 new_text: "a::{b, c}".into(),
2267 },
2268 lsp::TextEdit {
2269 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2270 new_text: "".into(),
2271 },
2272 lsp::TextEdit {
2273 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2274 new_text: "
2275 fn f() {
2276 b();
2277 c();
2278 }"
2279 .unindent(),
2280 },
2281 ],
2282 LanguageServerId(0),
2283 None,
2284 cx,
2285 )
2286 })
2287 .await
2288 .unwrap();
2289
2290 buffer.update(cx, |buffer, cx| {
2291 let edits = edits
2292 .into_iter()
2293 .map(|(range, text)| {
2294 (
2295 range.start.to_point(buffer)..range.end.to_point(buffer),
2296 text,
2297 )
2298 })
2299 .collect::<Vec<_>>();
2300
2301 assert_eq!(
2302 edits,
2303 [
2304 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2305 (Point::new(1, 0)..Point::new(2, 0), "".into())
2306 ]
2307 );
2308
2309 for (range, new_text) in edits {
2310 buffer.edit([(range, new_text)], None, cx);
2311 }
2312 assert_eq!(
2313 buffer.text(),
2314 "
2315 use a::{b, c};
2316
2317 fn f() {
2318 b();
2319 c();
2320 }
2321 "
2322 .unindent()
2323 );
2324 });
2325}
2326
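/// Collects the text in `range` into chunks, merging adjacent chunks that
/// share the same diagnostic severity.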
2327fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2328 buffer: &Buffer,
2329 range: Range<T>,
2330) -> Vec<(String, Option<DiagnosticSeverity>)> {
2331 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2332 for chunk in buffer.snapshot().chunks(range, true) {
2333 if chunks.last().map_or(false, |prev_chunk| {
2334 prev_chunk.1 == chunk.diagnostic_severity
2335 }) {
2336 chunks.last_mut().unwrap().0.push_str(chunk.text);
2337 } else {
2338 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2339 }
2340 }
2341 chunks
2342}
2343
2344#[gpui::test(iterations = 10)]
2345async fn test_definition(cx: &mut gpui::TestAppContext) {
2346 init_test(cx);
2347
2348 let fs = FakeFs::new(cx.executor());
2349 fs.insert_tree(
2350 "/dir",
2351 json!({
2352 "a.rs": "const fn a() { A }",
2353 "b.rs": "const y: i32 = crate::a()",
2354 }),
2355 )
2356 .await;
2357
2358 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2359
2360 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2361 language_registry.add(rust_lang());
2362 let mut fake_servers =
2363 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
2364
2365 let buffer = project
2366 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2367 .await
2368 .unwrap();
2369
2370 let fake_server = fake_servers.next().await.unwrap();
2371 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2372 let params = params.text_document_position_params;
2373 assert_eq!(
2374 params.text_document.uri.to_file_path().unwrap(),
2375 Path::new("/dir/b.rs"),
2376 );
2377 assert_eq!(params.position, lsp::Position::new(0, 22));
2378
2379 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2380 lsp::Location::new(
2381 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2382 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2383 ),
2384 )))
2385 });
2386
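    // Request the definition. The server responds with a location in `a.rs`,
    // a file outside the visible worktree.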
2387 let mut definitions = project
2388 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2389 .await
2390 .unwrap();
2391
    // Assert that no new language server was started.
2393 cx.executor().run_until_parked();
2394 assert!(fake_servers.try_next().is_err());
2395
2396 assert_eq!(definitions.len(), 1);
2397 let definition = definitions.pop().unwrap();
2398 cx.update(|cx| {
2399 let target_buffer = definition.target.buffer.read(cx);
2400 assert_eq!(
2401 target_buffer
2402 .file()
2403 .unwrap()
2404 .as_local()
2405 .unwrap()
2406 .abs_path(cx),
2407 Path::new("/dir/a.rs"),
2408 );
2409 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2410 assert_eq!(
2411 list_worktrees(&project, cx),
2412 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2413 );
2414
2415 drop(definition);
2416 });
2417 cx.update(|cx| {
2418 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2419 });
2420
2421 fn list_worktrees<'a>(
2422 project: &'a Model<Project>,
2423 cx: &'a AppContext,
2424 ) -> Vec<(&'a Path, bool)> {
2425 project
2426 .read(cx)
2427 .worktrees()
2428 .map(|worktree| {
2429 let worktree = worktree.read(cx);
2430 (
2431 worktree.as_local().unwrap().abs_path().as_ref(),
2432 worktree.is_visible(),
2433 )
2434 })
2435 .collect::<Vec<_>>()
2436 }
2437}
2438
2439#[gpui::test]
2440async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2441 init_test(cx);
2442
2443 let fs = FakeFs::new(cx.executor());
2444 fs.insert_tree(
2445 "/dir",
2446 json!({
2447 "a.ts": "",
2448 }),
2449 )
2450 .await;
2451
2452 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2453
2454 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2455 language_registry.add(typescript_lang());
2456 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2457 "TypeScript",
2458 FakeLspAdapter {
2459 capabilities: lsp::ServerCapabilities {
2460 completion_provider: Some(lsp::CompletionOptions {
2461 trigger_characters: Some(vec![":".to_string()]),
2462 ..Default::default()
2463 }),
2464 ..Default::default()
2465 },
2466 ..Default::default()
2467 },
2468 );
2469
2470 let buffer = project
2471 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2472 .await
2473 .unwrap();
2474
2475 let fake_server = fake_language_servers.next().await.unwrap();
2476
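    // The server's completion items carry no edit range, so the replaced range
    // is inferred from the word adjacent to the cursor.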
2477 let text = "let a = b.fqn";
2478 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2479 let completions = project.update(cx, |project, cx| {
2480 project.completions(&buffer, text.len(), cx)
2481 });
2482
2483 fake_server
2484 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2485 Ok(Some(lsp::CompletionResponse::Array(vec![
2486 lsp::CompletionItem {
2487 label: "fullyQualifiedName?".into(),
2488 insert_text: Some("fullyQualifiedName".into()),
2489 ..Default::default()
2490 },
2491 ])))
2492 })
2493 .next()
2494 .await;
2495 let completions = completions.await.unwrap();
2496 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2497 assert_eq!(completions.len(), 1);
2498 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2499 assert_eq!(
2500 completions[0].old_range.to_offset(&snapshot),
2501 text.len() - 3..text.len()
2502 );
2503
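    // When the item has no insert_text, its label is used as the new text,
    // replacing the partial word before the cursor.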
2504 let text = "let a = \"atoms/cmp\"";
2505 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2506 let completions = project.update(cx, |project, cx| {
2507 project.completions(&buffer, text.len() - 1, cx)
2508 });
2509
2510 fake_server
2511 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2512 Ok(Some(lsp::CompletionResponse::Array(vec![
2513 lsp::CompletionItem {
2514 label: "component".into(),
2515 ..Default::default()
2516 },
2517 ])))
2518 })
2519 .next()
2520 .await;
2521 let completions = completions.await.unwrap();
2522 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2523 assert_eq!(completions.len(), 1);
2524 assert_eq!(completions[0].new_text, "component");
2525 assert_eq!(
2526 completions[0].old_range.to_offset(&snapshot),
2527 text.len() - 4..text.len() - 1
2528 );
2529}
2530
2531#[gpui::test]
2532async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2533 init_test(cx);
2534
2535 let fs = FakeFs::new(cx.executor());
2536 fs.insert_tree(
2537 "/dir",
2538 json!({
2539 "a.ts": "",
2540 }),
2541 )
2542 .await;
2543
2544 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2545
2546 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2547 language_registry.add(typescript_lang());
2548 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2549 "TypeScript",
2550 FakeLspAdapter {
2551 capabilities: lsp::ServerCapabilities {
2552 completion_provider: Some(lsp::CompletionOptions {
2553 trigger_characters: Some(vec![":".to_string()]),
2554 ..Default::default()
2555 }),
2556 ..Default::default()
2557 },
2558 ..Default::default()
2559 },
2560 );
2561
2562 let buffer = project
2563 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2564 .await
2565 .unwrap();
2566
2567 let fake_server = fake_language_servers.next().await.unwrap();
2568
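    // The server's insert text contains carriage returns, which should be
    // normalized to '\n' in the resulting completion.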
2569 let text = "let a = b.fqn";
2570 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2571 let completions = project.update(cx, |project, cx| {
2572 project.completions(&buffer, text.len(), cx)
2573 });
2574
2575 fake_server
2576 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2577 Ok(Some(lsp::CompletionResponse::Array(vec![
2578 lsp::CompletionItem {
2579 label: "fullyQualifiedName?".into(),
2580 insert_text: Some("fully\rQualified\r\nName".into()),
2581 ..Default::default()
2582 },
2583 ])))
2584 })
2585 .next()
2586 .await;
2587 let completions = completions.await.unwrap();
2588 assert_eq!(completions.len(), 1);
2589 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2590}
2591
2592#[gpui::test(iterations = 10)]
2593async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2594 init_test(cx);
2595
2596 let fs = FakeFs::new(cx.executor());
2597 fs.insert_tree(
2598 "/dir",
2599 json!({
2600 "a.ts": "a",
2601 }),
2602 )
2603 .await;
2604
2605 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2606
2607 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2608 language_registry.add(typescript_lang());
2609 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2610 "TypeScript",
2611 FakeLspAdapter {
2612 capabilities: lsp::ServerCapabilities {
2613 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
2614 lsp::CodeActionOptions {
2615 resolve_provider: Some(true),
2616 ..lsp::CodeActionOptions::default()
2617 },
2618 )),
2619 ..lsp::ServerCapabilities::default()
2620 },
2621 ..FakeLspAdapter::default()
2622 },
2623 );
2624
2625 let buffer = project
2626 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2627 .await
2628 .unwrap();
2629
2630 let fake_server = fake_language_servers.next().await.unwrap();
2631
    // The language server returns code actions that contain commands, not edits.
2633 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2634 fake_server
2635 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2636 Ok(Some(vec![
2637 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2638 title: "The code action".into(),
2639 data: Some(serde_json::json!({
2640 "command": "_the/command",
2641 })),
2642 ..lsp::CodeAction::default()
2643 }),
2644 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2645 title: "two".into(),
2646 ..lsp::CodeAction::default()
2647 }),
2648 ]))
2649 })
2650 .next()
2651 .await;
2652
2653 let action = actions.await[0].clone();
2654 let apply = project.update(cx, |project, cx| {
2655 project.apply_code_action(buffer.clone(), action, true, cx)
2656 });
2657
    // Resolving the code action does not populate its edits. In the absence of
2659 // edits, we must execute the given command.
2660 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2661 |mut action, _| async move {
2662 if action.data.is_some() {
2663 action.command = Some(lsp::Command {
2664 title: "The command".into(),
2665 command: "_the/command".into(),
2666 arguments: Some(vec![json!("the-argument")]),
2667 });
2668 }
2669 Ok(action)
2670 },
2671 );
2672
2673 // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2675 fake_server
2676 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2677 let fake = fake_server.clone();
2678 move |params, _| {
2679 assert_eq!(params.command, "_the/command");
2680 let fake = fake.clone();
2681 async move {
2682 fake.server
2683 .request::<lsp::request::ApplyWorkspaceEdit>(
2684 lsp::ApplyWorkspaceEditParams {
2685 label: None,
2686 edit: lsp::WorkspaceEdit {
2687 changes: Some(
2688 [(
2689 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2690 vec![lsp::TextEdit {
2691 range: lsp::Range::new(
2692 lsp::Position::new(0, 0),
2693 lsp::Position::new(0, 0),
2694 ),
2695 new_text: "X".into(),
2696 }],
2697 )]
2698 .into_iter()
2699 .collect(),
2700 ),
2701 ..Default::default()
2702 },
2703 },
2704 )
2705 .await
2706 .unwrap();
2707 Ok(Some(json!(null)))
2708 }
2709 }
2710 })
2711 .next()
2712 .await;
2713
2714 // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2716 let transaction = apply.await.unwrap();
2717 assert!(transaction.0.contains_key(&buffer));
2718 buffer.update(cx, |buffer, cx| {
2719 assert_eq!(buffer.text(), "Xa");
2720 buffer.undo(cx);
2721 assert_eq!(buffer.text(), "a");
2722 });
2723}
2724
2725#[gpui::test(iterations = 10)]
2726async fn test_save_file(cx: &mut gpui::TestAppContext) {
2727 init_test(cx);
2728
2729 let fs = FakeFs::new(cx.executor());
2730 fs.insert_tree(
2731 "/dir",
2732 json!({
2733 "file1": "the old contents",
2734 }),
2735 )
2736 .await;
2737
2738 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2739 let buffer = project
2740 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2741 .await
2742 .unwrap();
2743 buffer.update(cx, |buffer, cx| {
2744 assert_eq!(buffer.text(), "the old contents");
2745 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2746 });
2747
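    // Save the buffer and confirm that the file on disk matches its new contents.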
2748 project
2749 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2750 .await
2751 .unwrap();
2752
2753 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2754 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2755}
2756
2757#[gpui::test(iterations = 30)]
2758async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2759 init_test(cx);
2760
2761 let fs = FakeFs::new(cx.executor().clone());
2762 fs.insert_tree(
2763 "/dir",
2764 json!({
2765 "file1": "the original contents",
2766 }),
2767 )
2768 .await;
2769
2770 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2771 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2772 let buffer = project
2773 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2774 .await
2775 .unwrap();
2776
2777 // Simulate buffer diffs being slow, so that they don't complete before
2778 // the next file change occurs.
2779 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2780
2781 // Change the buffer's file on disk, and then wait for the file change
2782 // to be detected by the worktree, so that the buffer starts reloading.
2783 fs.save(
2784 "/dir/file1".as_ref(),
2785 &"the first contents".into(),
2786 Default::default(),
2787 )
2788 .await
2789 .unwrap();
2790 worktree.next_event(cx).await;
2791
2792 // Change the buffer's file again. Depending on the random seed, the
2793 // previous file change may still be in progress.
2794 fs.save(
2795 "/dir/file1".as_ref(),
2796 &"the second contents".into(),
2797 Default::default(),
2798 )
2799 .await
2800 .unwrap();
2801 worktree.next_event(cx).await;
2802
2803 cx.executor().run_until_parked();
2804 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2805 buffer.read_with(cx, |buffer, _| {
2806 assert_eq!(buffer.text(), on_disk_text);
2807 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2809 });
2810}
2811
2812#[gpui::test(iterations = 30)]
2813async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2814 init_test(cx);
2815
2816 let fs = FakeFs::new(cx.executor().clone());
2817 fs.insert_tree(
2818 "/dir",
2819 json!({
2820 "file1": "the original contents",
2821 }),
2822 )
2823 .await;
2824
2825 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2826 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2827 let buffer = project
2828 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2829 .await
2830 .unwrap();
2831
2832 // Simulate buffer diffs being slow, so that they don't complete before
2833 // the next file change occurs.
2834 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2835
2836 // Change the buffer's file on disk, and then wait for the file change
2837 // to be detected by the worktree, so that the buffer starts reloading.
2838 fs.save(
2839 "/dir/file1".as_ref(),
2840 &"the first contents".into(),
2841 Default::default(),
2842 )
2843 .await
2844 .unwrap();
2845 worktree.next_event(cx).await;
2846
2847 cx.executor()
2848 .spawn(cx.executor().simulate_random_delay())
2849 .await;
2850
2851 // Perform a noop edit, causing the buffer's version to increase.
2852 buffer.update(cx, |buffer, cx| {
2853 buffer.edit([(0..0, " ")], None, cx);
2854 buffer.undo(cx);
2855 });
2856
2857 cx.executor().run_until_parked();
2858 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2859 buffer.read_with(cx, |buffer, _| {
2860 let buffer_text = buffer.text();
2861 if buffer_text == on_disk_text {
2862 assert!(
2863 !buffer.is_dirty() && !buffer.has_conflict(),
2864 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2865 );
2866 }
2867 // If the file change occurred while the buffer was processing the first
2868 // change, the buffer will be in a conflicting state.
2869 else {
2870 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2872 }
2873 });
2874}
2875
2876#[gpui::test]
2877async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2878 init_test(cx);
2879
2880 let fs = FakeFs::new(cx.executor());
2881 fs.insert_tree(
2882 "/dir",
2883 json!({
2884 "file1": "the old contents",
2885 }),
2886 )
2887 .await;
2888
2889 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2890 let buffer = project
2891 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2892 .await
2893 .unwrap();
2894 buffer.update(cx, |buffer, cx| {
2895 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2896 });
2897
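    // Saving should work the same way when the worktree consists of a single
    // file rather than a directory.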
2898 project
2899 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2900 .await
2901 .unwrap();
2902
2903 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2904 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2905}
2906
2907#[gpui::test]
2908async fn test_save_as(cx: &mut gpui::TestAppContext) {
2909 init_test(cx);
2910
2911 let fs = FakeFs::new(cx.executor());
2912 fs.insert_tree("/dir", json!({})).await;
2913
2914 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2915
2916 let languages = project.update(cx, |project, _| project.languages().clone());
2917 languages.add(rust_lang());
2918
2919 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
2920 buffer.update(cx, |buffer, cx| {
2921 buffer.edit([(0..0, "abc")], None, cx);
2922 assert!(buffer.is_dirty());
2923 assert!(!buffer.has_conflict());
2924 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2925 });
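    // Save the untitled buffer under a new path within the worktree.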
2926 project
2927 .update(cx, |project, cx| {
2928 let worktree_id = project.worktrees().next().unwrap().read(cx).id();
2929 let path = ProjectPath {
2930 worktree_id,
2931 path: Arc::from(Path::new("file1.rs")),
2932 };
2933 project.save_buffer_as(buffer.clone(), path, cx)
2934 })
2935 .await
2936 .unwrap();
2937 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2938
2939 cx.executor().run_until_parked();
2940 buffer.update(cx, |buffer, cx| {
2941 assert_eq!(
2942 buffer.file().unwrap().full_path(cx),
2943 Path::new("dir/file1.rs")
2944 );
2945 assert!(!buffer.is_dirty());
2946 assert!(!buffer.has_conflict());
2947 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2948 });
2949
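    // Opening the newly saved path should return the same buffer rather than
    // creating a new one.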
2950 let opened_buffer = project
2951 .update(cx, |project, cx| {
2952 project.open_local_buffer("/dir/file1.rs", cx)
2953 })
2954 .await
2955 .unwrap();
2956 assert_eq!(opened_buffer, buffer);
2957}
2958
2959#[gpui::test(retries = 5)]
2960async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2961 init_test(cx);
2962 cx.executor().allow_parking();
2963
2964 let dir = temp_tree(json!({
2965 "a": {
2966 "file1": "",
2967 "file2": "",
2968 "file3": "",
2969 },
2970 "b": {
2971 "c": {
2972 "file4": "",
2973 "file5": "",
2974 }
2975 }
2976 }));
2977
2978 let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
2979 let rpc = project.update(cx, |p, _| p.client.clone());
2980
2981 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2982 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2983 async move { buffer.await.unwrap() }
2984 };
2985 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2986 project.update(cx, |project, cx| {
2987 let tree = project.worktrees().next().unwrap();
2988 tree.read(cx)
2989 .entry_for_path(path)
2990 .unwrap_or_else(|| panic!("no entry for path {}", path))
2991 .id
2992 })
2993 };
2994
2995 let buffer2 = buffer_for_path("a/file2", cx).await;
2996 let buffer3 = buffer_for_path("a/file3", cx).await;
2997 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2998 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2999
3000 let file2_id = id_for_path("a/file2", cx);
3001 let file3_id = id_for_path("a/file3", cx);
3002 let file4_id = id_for_path("b/c/file4", cx);
3003
3004 // Create a remote copy of this worktree.
3005 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3006
3007 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
3008
3009 let updates = Arc::new(Mutex::new(Vec::new()));
3010 tree.update(cx, |tree, cx| {
3011 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
3012 let updates = updates.clone();
3013 move |update| {
3014 updates.lock().push(update);
3015 async { true }
3016 }
3017 });
3018 });
3019
3020 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
3021
3022 cx.executor().run_until_parked();
3023
3024 cx.update(|cx| {
3025 assert!(!buffer2.read(cx).is_dirty());
3026 assert!(!buffer3.read(cx).is_dirty());
3027 assert!(!buffer4.read(cx).is_dirty());
3028 assert!(!buffer5.read(cx).is_dirty());
3029 });
3030
3031 // Rename and delete files and directories.
3032 tree.flush_fs_events(cx).await;
3033 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3034 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3035 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3036 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3037 tree.flush_fs_events(cx).await;
3038
3039 let expected_paths = vec![
3040 "a",
3041 "a/file1",
3042 "a/file2.new",
3043 "b",
3044 "d",
3045 "d/file3",
3046 "d/file4",
3047 ];
3048
3049 cx.update(|app| {
3050 assert_eq!(
3051 tree.read(app)
3052 .paths()
3053 .map(|p| p.to_str().unwrap())
3054 .collect::<Vec<_>>(),
3055 expected_paths
3056 );
3057 });
3058
3059 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
3060 assert_eq!(id_for_path("d/file3", cx), file3_id);
3061 assert_eq!(id_for_path("d/file4", cx), file4_id);
3062
3063 cx.update(|cx| {
3064 assert_eq!(
3065 buffer2.read(cx).file().unwrap().path().as_ref(),
3066 Path::new("a/file2.new")
3067 );
3068 assert_eq!(
3069 buffer3.read(cx).file().unwrap().path().as_ref(),
3070 Path::new("d/file3")
3071 );
3072 assert_eq!(
3073 buffer4.read(cx).file().unwrap().path().as_ref(),
3074 Path::new("d/file4")
3075 );
3076 assert_eq!(
3077 buffer5.read(cx).file().unwrap().path().as_ref(),
3078 Path::new("b/c/file5")
3079 );
3080
3081 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
3082 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
3083 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
3084 assert!(buffer5.read(cx).file().unwrap().is_deleted());
3085 });
3086
3087 // Update the remote worktree. Check that it becomes consistent with the
3088 // local worktree.
3089 cx.executor().run_until_parked();
3090
3091 remote.update(cx, |remote, _| {
3092 for update in updates.lock().drain(..) {
3093 remote.as_remote_mut().unwrap().update_from_remote(update);
3094 }
3095 });
3096 cx.executor().run_until_parked();
3097 remote.update(cx, |remote, _| {
3098 assert_eq!(
3099 remote
3100 .paths()
3101 .map(|p| p.to_str().unwrap())
3102 .collect::<Vec<_>>(),
3103 expected_paths
3104 );
3105 });
3106}
3107
3108#[gpui::test(iterations = 10)]
3109async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3110 init_test(cx);
3111
3112 let fs = FakeFs::new(cx.executor());
3113 fs.insert_tree(
3114 "/dir",
3115 json!({
3116 "a": {
3117 "file1": "",
3118 }
3119 }),
3120 )
3121 .await;
3122
3123 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3124 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3125 let tree_id = tree.update(cx, |tree, _| tree.id());
3126
3127 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3128 project.update(cx, |project, cx| {
3129 let tree = project.worktrees().next().unwrap();
3130 tree.read(cx)
3131 .entry_for_path(path)
3132 .unwrap_or_else(|| panic!("no entry for path {}", path))
3133 .id
3134 })
3135 };
3136
3137 let dir_id = id_for_path("a", cx);
3138 let file_id = id_for_path("a/file1", cx);
3139 let buffer = project
3140 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3141 .await
3142 .unwrap();
3143 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3144
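    // Rename the parent directory. The entry ids and the open buffer should remain stable.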
3145 project
3146 .update(cx, |project, cx| {
3147 project.rename_entry(dir_id, Path::new("b"), cx)
3148 })
3149 .unwrap()
3150 .await
3151 .unwrap();
3152 cx.executor().run_until_parked();
3153
3154 assert_eq!(id_for_path("b", cx), dir_id);
3155 assert_eq!(id_for_path("b/file1", cx), file_id);
3156 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3157}
3158
3159#[gpui::test]
3160async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3161 init_test(cx);
3162
3163 let fs = FakeFs::new(cx.executor());
3164 fs.insert_tree(
3165 "/dir",
3166 json!({
3167 "a.txt": "a-contents",
3168 "b.txt": "b-contents",
3169 }),
3170 )
3171 .await;
3172
3173 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3174
3175 // Spawn multiple tasks to open paths, repeating some paths.
3176 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3177 (
3178 p.open_local_buffer("/dir/a.txt", cx),
3179 p.open_local_buffer("/dir/b.txt", cx),
3180 p.open_local_buffer("/dir/a.txt", cx),
3181 )
3182 });
3183
3184 let buffer_a_1 = buffer_a_1.await.unwrap();
3185 let buffer_a_2 = buffer_a_2.await.unwrap();
3186 let buffer_b = buffer_b.await.unwrap();
3187 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3188 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3189
3190 // There is only one buffer per path.
3191 let buffer_a_id = buffer_a_1.entity_id();
3192 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3193
3194 // Open the same path again while it is still open.
3195 drop(buffer_a_1);
3196 let buffer_a_3 = project
3197 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3198 .await
3199 .unwrap();
3200
3201 // There's still only one buffer per path.
3202 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3203}
3204
3205#[gpui::test]
3206async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3207 init_test(cx);
3208
3209 let fs = FakeFs::new(cx.executor());
3210 fs.insert_tree(
3211 "/dir",
3212 json!({
3213 "file1": "abc",
3214 "file2": "def",
3215 "file3": "ghi",
3216 }),
3217 )
3218 .await;
3219
3220 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3221
3222 let buffer1 = project
3223 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3224 .await
3225 .unwrap();
3226 let events = Arc::new(Mutex::new(Vec::new()));
3227
    // Initially, the buffer isn't dirty.
3229 buffer1.update(cx, |buffer, cx| {
3230 cx.subscribe(&buffer1, {
3231 let events = events.clone();
3232 move |_, _, event, _| match event {
3233 BufferEvent::Operation(_) => {}
3234 _ => events.lock().push(event.clone()),
3235 }
3236 })
3237 .detach();
3238
3239 assert!(!buffer.is_dirty());
3240 assert!(events.lock().is_empty());
3241
3242 buffer.edit([(1..2, "")], None, cx);
3243 });
3244
    // After the first edit, the buffer is dirty and emits Edited and DirtyChanged events.
3246 buffer1.update(cx, |buffer, cx| {
3247 assert!(buffer.text() == "ac");
3248 assert!(buffer.is_dirty());
3249 assert_eq!(
3250 *events.lock(),
3251 &[language::Event::Edited, language::Event::DirtyChanged]
3252 );
3253 events.lock().clear();
3254 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
3255 });
3256
    // After saving, the buffer is no longer dirty and emits a Saved event.
3258 buffer1.update(cx, |buffer, cx| {
3259 assert!(!buffer.is_dirty());
3260 assert_eq!(*events.lock(), &[language::Event::Saved]);
3261 events.lock().clear();
3262
3263 buffer.edit([(1..1, "B")], None, cx);
3264 buffer.edit([(2..2, "D")], None, cx);
3265 });
3266
    // After editing again, the buffer is dirty and emits further Edited and DirtyChanged events.
3268 buffer1.update(cx, |buffer, cx| {
3269 assert!(buffer.text() == "aBDc");
3270 assert!(buffer.is_dirty());
3271 assert_eq!(
3272 *events.lock(),
3273 &[
3274 language::Event::Edited,
3275 language::Event::DirtyChanged,
3276 language::Event::Edited,
3277 ],
3278 );
3279 events.lock().clear();
3280
3281 // After restoring the buffer to its previously-saved state,
3282 // the buffer is not considered dirty anymore.
3283 buffer.edit([(1..3, "")], None, cx);
3284 assert!(buffer.text() == "ac");
3285 assert!(!buffer.is_dirty());
3286 });
3287
3288 assert_eq!(
3289 *events.lock(),
3290 &[language::Event::Edited, language::Event::DirtyChanged]
3291 );
3292
3293 // When a file is deleted, the buffer is considered dirty.
3294 let events = Arc::new(Mutex::new(Vec::new()));
3295 let buffer2 = project
3296 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3297 .await
3298 .unwrap();
3299 buffer2.update(cx, |_, cx| {
3300 cx.subscribe(&buffer2, {
3301 let events = events.clone();
3302 move |_, _, event, _| events.lock().push(event.clone())
3303 })
3304 .detach();
3305 });
3306
3307 fs.remove_file("/dir/file2".as_ref(), Default::default())
3308 .await
3309 .unwrap();
3310 cx.executor().run_until_parked();
3311 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3312 assert_eq!(
3313 *events.lock(),
3314 &[
3315 language::Event::DirtyChanged,
3316 language::Event::FileHandleChanged
3317 ]
3318 );
3319
    // When a file is already dirty when it is deleted, no DirtyChanged event is emitted.
3321 let events = Arc::new(Mutex::new(Vec::new()));
3322 let buffer3 = project
3323 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3324 .await
3325 .unwrap();
3326 buffer3.update(cx, |_, cx| {
3327 cx.subscribe(&buffer3, {
3328 let events = events.clone();
3329 move |_, _, event, _| events.lock().push(event.clone())
3330 })
3331 .detach();
3332 });
3333
3334 buffer3.update(cx, |buffer, cx| {
3335 buffer.edit([(0..0, "x")], None, cx);
3336 });
3337 events.lock().clear();
3338 fs.remove_file("/dir/file3".as_ref(), Default::default())
3339 .await
3340 .unwrap();
3341 cx.executor().run_until_parked();
3342 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3343 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3344}
3345
3346#[gpui::test]
3347async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3348 init_test(cx);
3349
3350 let initial_contents = "aaa\nbbbbb\nc\n";
3351 let fs = FakeFs::new(cx.executor());
3352 fs.insert_tree(
3353 "/dir",
3354 json!({
3355 "the-file": initial_contents,
3356 }),
3357 )
3358 .await;
3359 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3360 let buffer = project
3361 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3362 .await
3363 .unwrap();
3364
3365 let anchors = (0..3)
3366 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3367 .collect::<Vec<_>>();
3368
3369 // Change the file on disk, adding two new lines of text, and removing
3370 // one line.
3371 buffer.update(cx, |buffer, _| {
3372 assert!(!buffer.is_dirty());
3373 assert!(!buffer.has_conflict());
3374 });
3375 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3376 fs.save(
3377 "/dir/the-file".as_ref(),
3378 &new_contents.into(),
3379 LineEnding::Unix,
3380 )
3381 .await
3382 .unwrap();
3383
3384 // Because the buffer was not modified, it is reloaded from disk. Its
3385 // contents are edited according to the diff between the old and new
3386 // file contents.
3387 cx.executor().run_until_parked();
3388 buffer.update(cx, |buffer, _| {
3389 assert_eq!(buffer.text(), new_contents);
3390 assert!(!buffer.is_dirty());
3391 assert!(!buffer.has_conflict());
3392
3393 let anchor_positions = anchors
3394 .iter()
3395 .map(|anchor| anchor.to_point(&*buffer))
3396 .collect::<Vec<_>>();
3397 assert_eq!(
3398 anchor_positions,
3399 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3400 );
3401 });
3402
3403 // Modify the buffer
3404 buffer.update(cx, |buffer, cx| {
3405 buffer.edit([(0..0, " ")], None, cx);
3406 assert!(buffer.is_dirty());
3407 assert!(!buffer.has_conflict());
3408 });
3409
3410 // Change the file on disk again, adding blank lines to the beginning.
3411 fs.save(
3412 "/dir/the-file".as_ref(),
3413 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3414 LineEnding::Unix,
3415 )
3416 .await
3417 .unwrap();
3418
3419 // Because the buffer is modified, it doesn't reload from disk, but is
3420 // marked as having a conflict.
3421 cx.executor().run_until_parked();
3422 buffer.update(cx, |buffer, _| {
3423 assert!(buffer.has_conflict());
3424 });
3425}
3426
3427#[gpui::test]
3428async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3429 init_test(cx);
3430
3431 let fs = FakeFs::new(cx.executor());
3432 fs.insert_tree(
3433 "/dir",
3434 json!({
3435 "file1": "a\nb\nc\n",
3436 "file2": "one\r\ntwo\r\nthree\r\n",
3437 }),
3438 )
3439 .await;
3440
3441 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3442 let buffer1 = project
3443 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3444 .await
3445 .unwrap();
3446 let buffer2 = project
3447 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3448 .await
3449 .unwrap();
3450
3451 buffer1.update(cx, |buffer, _| {
3452 assert_eq!(buffer.text(), "a\nb\nc\n");
3453 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3454 });
3455 buffer2.update(cx, |buffer, _| {
3456 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3457 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3458 });
3459
    // Change a file's line endings on disk from Unix to Windows. The buffer's
3461 // state updates correctly.
3462 fs.save(
3463 "/dir/file1".as_ref(),
3464 &"aaa\nb\nc\n".into(),
3465 LineEnding::Windows,
3466 )
3467 .await
3468 .unwrap();
3469 cx.executor().run_until_parked();
3470 buffer1.update(cx, |buffer, _| {
3471 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3472 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3473 });
3474
    // Save a file with Windows line endings. The file is written correctly.
3476 buffer2.update(cx, |buffer, cx| {
3477 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3478 });
3479 project
3480 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3481 .await
3482 .unwrap();
3483 assert_eq!(
3484 fs.load("/dir/file2".as_ref()).await.unwrap(),
3485 "one\r\ntwo\r\nthree\r\nfour\r\n",
3486 );
3487}
3488
3489#[gpui::test]
3490async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3491 init_test(cx);
3492
3493 let fs = FakeFs::new(cx.executor());
3494 fs.insert_tree(
3495 "/the-dir",
3496 json!({
3497 "a.rs": "
3498 fn foo(mut v: Vec<usize>) {
3499 for x in &v {
3500 v.push(1);
3501 }
3502 }
3503 "
3504 .unindent(),
3505 }),
3506 )
3507 .await;
3508
3509 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3510 let buffer = project
3511 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3512 .await
3513 .unwrap();
3514
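    // Publish diagnostics whose primary messages and hints reference one
    // another via `relatedInformation`.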
3515 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3516 let message = lsp::PublishDiagnosticsParams {
3517 uri: buffer_uri.clone(),
3518 diagnostics: vec![
3519 lsp::Diagnostic {
3520 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3521 severity: Some(DiagnosticSeverity::WARNING),
3522 message: "error 1".to_string(),
3523 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3524 location: lsp::Location {
3525 uri: buffer_uri.clone(),
3526 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3527 },
3528 message: "error 1 hint 1".to_string(),
3529 }]),
3530 ..Default::default()
3531 },
3532 lsp::Diagnostic {
3533 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3534 severity: Some(DiagnosticSeverity::HINT),
3535 message: "error 1 hint 1".to_string(),
3536 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3537 location: lsp::Location {
3538 uri: buffer_uri.clone(),
3539 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3540 },
3541 message: "original diagnostic".to_string(),
3542 }]),
3543 ..Default::default()
3544 },
3545 lsp::Diagnostic {
3546 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3547 severity: Some(DiagnosticSeverity::ERROR),
3548 message: "error 2".to_string(),
3549 related_information: Some(vec![
3550 lsp::DiagnosticRelatedInformation {
3551 location: lsp::Location {
3552 uri: buffer_uri.clone(),
3553 range: lsp::Range::new(
3554 lsp::Position::new(1, 13),
3555 lsp::Position::new(1, 15),
3556 ),
3557 },
3558 message: "error 2 hint 1".to_string(),
3559 },
3560 lsp::DiagnosticRelatedInformation {
3561 location: lsp::Location {
3562 uri: buffer_uri.clone(),
3563 range: lsp::Range::new(
3564 lsp::Position::new(1, 13),
3565 lsp::Position::new(1, 15),
3566 ),
3567 },
3568 message: "error 2 hint 2".to_string(),
3569 },
3570 ]),
3571 ..Default::default()
3572 },
3573 lsp::Diagnostic {
3574 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3575 severity: Some(DiagnosticSeverity::HINT),
3576 message: "error 2 hint 1".to_string(),
3577 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3578 location: lsp::Location {
3579 uri: buffer_uri.clone(),
3580 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3581 },
3582 message: "original diagnostic".to_string(),
3583 }]),
3584 ..Default::default()
3585 },
3586 lsp::Diagnostic {
3587 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3588 severity: Some(DiagnosticSeverity::HINT),
3589 message: "error 2 hint 2".to_string(),
3590 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3591 location: lsp::Location {
3592 uri: buffer_uri,
3593 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3594 },
3595 message: "original diagnostic".to_string(),
3596 }]),
3597 ..Default::default()
3598 },
3599 ],
3600 version: None,
3601 };
3602
3603 project
3604 .update(cx, |p, cx| {
3605 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3606 })
3607 .unwrap();
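    // Diagnostics that reference one another are grouped together, sharing a
    // group id and a single primary entry.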
3608 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3609
3610 assert_eq!(
3611 buffer
3612 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3613 .collect::<Vec<_>>(),
3614 &[
3615 DiagnosticEntry {
3616 range: Point::new(1, 8)..Point::new(1, 9),
3617 diagnostic: Diagnostic {
3618 severity: DiagnosticSeverity::WARNING,
3619 message: "error 1".to_string(),
3620 group_id: 1,
3621 is_primary: true,
3622 ..Default::default()
3623 }
3624 },
3625 DiagnosticEntry {
3626 range: Point::new(1, 8)..Point::new(1, 9),
3627 diagnostic: Diagnostic {
3628 severity: DiagnosticSeverity::HINT,
3629 message: "error 1 hint 1".to_string(),
3630 group_id: 1,
3631 is_primary: false,
3632 ..Default::default()
3633 }
3634 },
3635 DiagnosticEntry {
3636 range: Point::new(1, 13)..Point::new(1, 15),
3637 diagnostic: Diagnostic {
3638 severity: DiagnosticSeverity::HINT,
3639 message: "error 2 hint 1".to_string(),
3640 group_id: 0,
3641 is_primary: false,
3642 ..Default::default()
3643 }
3644 },
3645 DiagnosticEntry {
3646 range: Point::new(1, 13)..Point::new(1, 15),
3647 diagnostic: Diagnostic {
3648 severity: DiagnosticSeverity::HINT,
3649 message: "error 2 hint 2".to_string(),
3650 group_id: 0,
3651 is_primary: false,
3652 ..Default::default()
3653 }
3654 },
3655 DiagnosticEntry {
3656 range: Point::new(2, 8)..Point::new(2, 17),
3657 diagnostic: Diagnostic {
3658 severity: DiagnosticSeverity::ERROR,
3659 message: "error 2".to_string(),
3660 group_id: 0,
3661 is_primary: true,
3662 ..Default::default()
3663 }
3664 }
3665 ]
3666 );
3667
3668 assert_eq!(
3669 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3670 &[
3671 DiagnosticEntry {
3672 range: Point::new(1, 13)..Point::new(1, 15),
3673 diagnostic: Diagnostic {
3674 severity: DiagnosticSeverity::HINT,
3675 message: "error 2 hint 1".to_string(),
3676 group_id: 0,
3677 is_primary: false,
3678 ..Default::default()
3679 }
3680 },
3681 DiagnosticEntry {
3682 range: Point::new(1, 13)..Point::new(1, 15),
3683 diagnostic: Diagnostic {
3684 severity: DiagnosticSeverity::HINT,
3685 message: "error 2 hint 2".to_string(),
3686 group_id: 0,
3687 is_primary: false,
3688 ..Default::default()
3689 }
3690 },
3691 DiagnosticEntry {
3692 range: Point::new(2, 8)..Point::new(2, 17),
3693 diagnostic: Diagnostic {
3694 severity: DiagnosticSeverity::ERROR,
3695 message: "error 2".to_string(),
3696 group_id: 0,
3697 is_primary: true,
3698 ..Default::default()
3699 }
3700 }
3701 ]
3702 );
3703
3704 assert_eq!(
3705 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3706 &[
3707 DiagnosticEntry {
3708 range: Point::new(1, 8)..Point::new(1, 9),
3709 diagnostic: Diagnostic {
3710 severity: DiagnosticSeverity::WARNING,
3711 message: "error 1".to_string(),
3712 group_id: 1,
3713 is_primary: true,
3714 ..Default::default()
3715 }
3716 },
3717 DiagnosticEntry {
3718 range: Point::new(1, 8)..Point::new(1, 9),
3719 diagnostic: Diagnostic {
3720 severity: DiagnosticSeverity::HINT,
3721 message: "error 1 hint 1".to_string(),
3722 group_id: 1,
3723 is_primary: false,
3724 ..Default::default()
3725 }
3726 },
3727 ]
3728 );
3729}
3730
3731#[gpui::test]
3732async fn test_rename(cx: &mut gpui::TestAppContext) {
3733 init_test(cx);
3734
3735 let fs = FakeFs::new(cx.executor());
3736 fs.insert_tree(
3737 "/dir",
3738 json!({
3739 "one.rs": "const ONE: usize = 1;",
3740 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3741 }),
3742 )
3743 .await;
3744
3745 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3746
3747 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3748 language_registry.add(rust_lang());
3749 let mut fake_servers = language_registry.register_fake_lsp_adapter(
3750 "Rust",
3751 FakeLspAdapter {
3752 capabilities: lsp::ServerCapabilities {
3753 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3754 prepare_provider: Some(true),
3755 work_done_progress_options: Default::default(),
3756 })),
3757 ..Default::default()
3758 },
3759 ..Default::default()
3760 },
3761 );
3762
3763 let buffer = project
3764 .update(cx, |project, cx| {
3765 project.open_local_buffer("/dir/one.rs", cx)
3766 })
3767 .await
3768 .unwrap();
3769
3770 let fake_server = fake_servers.next().await.unwrap();
3771
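    // Prepare the rename. The server responds with the range of the symbol at
    // the requested position.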
3772 let response = project.update(cx, |project, cx| {
3773 project.prepare_rename(buffer.clone(), 7, cx)
3774 });
3775 fake_server
3776 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3777 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3778 assert_eq!(params.position, lsp::Position::new(0, 7));
3779 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3780 lsp::Position::new(0, 6),
3781 lsp::Position::new(0, 9),
3782 ))))
3783 })
3784 .next()
3785 .await
3786 .unwrap();
3787 let range = response.await.unwrap().unwrap();
3788 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3789 assert_eq!(range, 6..9);
3790
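    // Perform the rename. The resulting transaction should contain edits to
    // both `one.rs` and `two.rs`.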
3791 let response = project.update(cx, |project, cx| {
3792 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3793 });
3794 fake_server
3795 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3796 assert_eq!(
3797 params.text_document_position.text_document.uri.as_str(),
3798 "file:///dir/one.rs"
3799 );
3800 assert_eq!(
3801 params.text_document_position.position,
3802 lsp::Position::new(0, 7)
3803 );
3804 assert_eq!(params.new_name, "THREE");
3805 Ok(Some(lsp::WorkspaceEdit {
3806 changes: Some(
3807 [
3808 (
3809 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3810 vec![lsp::TextEdit::new(
3811 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3812 "THREE".to_string(),
3813 )],
3814 ),
3815 (
3816 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3817 vec![
3818 lsp::TextEdit::new(
3819 lsp::Range::new(
3820 lsp::Position::new(0, 24),
3821 lsp::Position::new(0, 27),
3822 ),
3823 "THREE".to_string(),
3824 ),
3825 lsp::TextEdit::new(
3826 lsp::Range::new(
3827 lsp::Position::new(0, 35),
3828 lsp::Position::new(0, 38),
3829 ),
3830 "THREE".to_string(),
3831 ),
3832 ],
3833 ),
3834 ]
3835 .into_iter()
3836 .collect(),
3837 ),
3838 ..Default::default()
3839 }))
3840 })
3841 .next()
3842 .await
3843 .unwrap();
3844 let mut transaction = response.await.unwrap().0;
3845 assert_eq!(transaction.len(), 2);
3846 assert_eq!(
3847 transaction
3848 .remove_entry(&buffer)
3849 .unwrap()
3850 .0
3851 .update(cx, |buffer, _| buffer.text()),
3852 "const THREE: usize = 1;"
3853 );
3854 assert_eq!(
3855 transaction
3856 .into_keys()
3857 .next()
3858 .unwrap()
3859 .update(cx, |buffer, _| buffer.text()),
3860 "const TWO: usize = one::THREE + one::THREE;"
3861 );
3862}
3863
3864#[gpui::test]
3865async fn test_search(cx: &mut gpui::TestAppContext) {
3866 init_test(cx);
3867
3868 let fs = FakeFs::new(cx.executor());
3869 fs.insert_tree(
3870 "/dir",
3871 json!({
3872 "one.rs": "const ONE: usize = 1;",
3873 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3874 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3875 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3876 }),
3877 )
3878 .await;
3879 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3880 assert_eq!(
3881 search(
3882 &project,
3883 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3884 cx
3885 )
3886 .await
3887 .unwrap(),
3888 HashMap::from_iter([
3889 ("dir/two.rs".to_string(), vec![6..9]),
3890 ("dir/three.rs".to_string(), vec![37..40])
3891 ])
3892 );
3893
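    // Edit an open buffer without saving. Subsequent searches should reflect
    // the unsaved contents.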
3894 let buffer_4 = project
3895 .update(cx, |project, cx| {
3896 project.open_local_buffer("/dir/four.rs", cx)
3897 })
3898 .await
3899 .unwrap();
3900 buffer_4.update(cx, |buffer, cx| {
3901 let text = "two::TWO";
3902 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3903 });
3904
3905 assert_eq!(
3906 search(
3907 &project,
3908 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3909 cx
3910 )
3911 .await
3912 .unwrap(),
3913 HashMap::from_iter([
3914 ("dir/two.rs".to_string(), vec![6..9]),
3915 ("dir/three.rs".to_string(), vec![37..40]),
3916 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3917 ])
3918 );
3919}
3920
3921#[gpui::test]
3922async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3923 init_test(cx);
3924
3925 let search_query = "file";
3926
3927 let fs = FakeFs::new(cx.executor());
3928 fs.insert_tree(
3929 "/dir",
3930 json!({
3931 "one.rs": r#"// Rust file one"#,
3932 "one.ts": r#"// TypeScript file one"#,
3933 "two.rs": r#"// Rust file two"#,
3934 "two.ts": r#"// TypeScript file two"#,
3935 }),
3936 )
3937 .await;
3938 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3939
3940 assert!(
3941 search(
3942 &project,
3943 SearchQuery::text(
3944 search_query,
3945 false,
3946 true,
3947 false,
3948 vec![PathMatcher::new("*.odd").unwrap()],
3949 Vec::new()
3950 )
3951 .unwrap(),
3952 cx
3953 )
3954 .await
3955 .unwrap()
3956 .is_empty(),
3957 "If no inclusions match, no files should be returned"
3958 );
3959
3960 assert_eq!(
3961 search(
3962 &project,
3963 SearchQuery::text(
3964 search_query,
3965 false,
3966 true,
3967 false,
3968 vec![PathMatcher::new("*.rs").unwrap()],
3969 Vec::new()
3970 )
3971 .unwrap(),
3972 cx
3973 )
3974 .await
3975 .unwrap(),
3976 HashMap::from_iter([
3977 ("dir/one.rs".to_string(), vec![8..12]),
3978 ("dir/two.rs".to_string(), vec![8..12]),
3979 ]),
3980 "Rust only search should give only Rust files"
3981 );
3982
3983 assert_eq!(
3984 search(
3985 &project,
3986 SearchQuery::text(
3987 search_query,
3988 false,
3989 true,
3990 false,
3991 vec![
3992 PathMatcher::new("*.ts").unwrap(),
3993 PathMatcher::new("*.odd").unwrap(),
3994 ],
3995 Vec::new()
3996 ).unwrap(),
3997 cx
3998 )
3999 .await
4000 .unwrap(),
4001 HashMap::from_iter([
4002 ("dir/one.ts".to_string(), vec![14..18]),
4003 ("dir/two.ts".to_string(), vec![14..18]),
4004 ]),
4005 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4006 );
4007
4008 assert_eq!(
4009 search(
4010 &project,
4011 SearchQuery::text(
4012 search_query,
4013 false,
4014 true,
4015 false,
4016 vec![
4017 PathMatcher::new("*.rs").unwrap(),
4018 PathMatcher::new("*.ts").unwrap(),
4019 PathMatcher::new("*.odd").unwrap(),
4020 ],
4021 Vec::new()
4022 ).unwrap(),
4023 cx
4024 )
4025 .await
4026 .unwrap(),
4027 HashMap::from_iter([
4028 ("dir/two.ts".to_string(), vec![14..18]),
4029 ("dir/one.rs".to_string(), vec![8..12]),
4030 ("dir/one.ts".to_string(), vec![14..18]),
4031 ("dir/two.rs".to_string(), vec![8..12]),
4032 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4034 );
4035}
4036
4037#[gpui::test]
4038async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4039 init_test(cx);
4040
4041 let search_query = "file";
4042
4043 let fs = FakeFs::new(cx.executor());
4044 fs.insert_tree(
4045 "/dir",
4046 json!({
4047 "one.rs": r#"// Rust file one"#,
4048 "one.ts": r#"// TypeScript file one"#,
4049 "two.rs": r#"// Rust file two"#,
4050 "two.ts": r#"// TypeScript file two"#,
4051 }),
4052 )
4053 .await;
4054 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4055
4056 assert_eq!(
4057 search(
4058 &project,
4059 SearchQuery::text(
4060 search_query,
4061 false,
4062 true,
4063 false,
4064 Vec::new(),
4065 vec![PathMatcher::new("*.odd").unwrap()],
4066 )
4067 .unwrap(),
4068 cx
4069 )
4070 .await
4071 .unwrap(),
4072 HashMap::from_iter([
4073 ("dir/one.rs".to_string(), vec![8..12]),
4074 ("dir/one.ts".to_string(), vec![14..18]),
4075 ("dir/two.rs".to_string(), vec![8..12]),
4076 ("dir/two.ts".to_string(), vec![14..18]),
4077 ]),
4078 "If no exclusions match, all files should be returned"
4079 );
4080
4081 assert_eq!(
4082 search(
4083 &project,
4084 SearchQuery::text(
4085 search_query,
4086 false,
4087 true,
4088 false,
4089 Vec::new(),
4090 vec![PathMatcher::new("*.rs").unwrap()],
4091 )
4092 .unwrap(),
4093 cx
4094 )
4095 .await
4096 .unwrap(),
4097 HashMap::from_iter([
4098 ("dir/one.ts".to_string(), vec![14..18]),
4099 ("dir/two.ts".to_string(), vec![14..18]),
4100 ]),
4101 "Rust exclusion search should give only TypeScript files"
4102 );
4103
4104 assert_eq!(
4105 search(
4106 &project,
4107 SearchQuery::text(
4108 search_query,
4109 false,
4110 true,
4111 false,
4112 Vec::new(),
4113 vec![
4114 PathMatcher::new("*.ts").unwrap(),
4115 PathMatcher::new("*.odd").unwrap(),
4116 ],
4117 ).unwrap(),
4118 cx
4119 )
4120 .await
4121 .unwrap(),
4122 HashMap::from_iter([
4123 ("dir/one.rs".to_string(), vec![8..12]),
4124 ("dir/two.rs".to_string(), vec![8..12]),
4125 ]),
4126 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4127 );
4128
4129 assert!(
4130 search(
4131 &project,
4132 SearchQuery::text(
4133 search_query,
4134 false,
4135 true,
4136 false,
4137 Vec::new(),
4138 vec![
4139 PathMatcher::new("*.rs").unwrap(),
4140 PathMatcher::new("*.ts").unwrap(),
4141 PathMatcher::new("*.odd").unwrap(),
4142 ],
4143 ).unwrap(),
4144 cx
4145 )
4146 .await
4147 .unwrap().is_empty(),
        "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
4149 );
4150}
4151
4152#[gpui::test]
4153async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4154 init_test(cx);
4155
4156 let search_query = "file";
4157
4158 let fs = FakeFs::new(cx.executor());
4159 fs.insert_tree(
4160 "/dir",
4161 json!({
4162 "one.rs": r#"// Rust file one"#,
4163 "one.ts": r#"// TypeScript file one"#,
4164 "two.rs": r#"// Rust file two"#,
4165 "two.ts": r#"// TypeScript file two"#,
4166 }),
4167 )
4168 .await;
4169 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4170
4171 assert!(
4172 search(
4173 &project,
4174 SearchQuery::text(
4175 search_query,
4176 false,
4177 true,
4178 false,
4179 vec![PathMatcher::new("*.odd").unwrap()],
4180 vec![PathMatcher::new("*.odd").unwrap()],
4181 )
4182 .unwrap(),
4183 cx
4184 )
4185 .await
4186 .unwrap()
4187 .is_empty(),
        "If neither inclusions nor exclusions match any files, no files should be returned"
4189 );
4190
4191 assert!(
4192 search(
4193 &project,
4194 SearchQuery::text(
4195 search_query,
4196 false,
4197 true,
4198 false,
4199 vec![PathMatcher::new("*.ts").unwrap()],
4200 vec![PathMatcher::new("*.ts").unwrap()],
4201 ).unwrap(),
4202 cx
4203 )
4204 .await
4205 .unwrap()
4206 .is_empty(),
        "If both TypeScript inclusions and exclusions match, exclusions should win and no files should be returned"
4208 );
4209
4210 assert!(
4211 search(
4212 &project,
4213 SearchQuery::text(
4214 search_query,
4215 false,
4216 true,
4217 false,
4218 vec![
4219 PathMatcher::new("*.ts").unwrap(),
4220 PathMatcher::new("*.odd").unwrap()
4221 ],
4222 vec![
4223 PathMatcher::new("*.ts").unwrap(),
4224 PathMatcher::new("*.odd").unwrap()
4225 ],
4226 )
4227 .unwrap(),
4228 cx
4229 )
4230 .await
4231 .unwrap()
4232 .is_empty(),
        "Adding non-matching inclusions and exclusions should not change the result"
4234 );
4235
4236 assert_eq!(
4237 search(
4238 &project,
4239 SearchQuery::text(
4240 search_query,
4241 false,
4242 true,
4243 false,
4244 vec![
4245 PathMatcher::new("*.ts").unwrap(),
4246 PathMatcher::new("*.odd").unwrap()
4247 ],
4248 vec![
4249 PathMatcher::new("*.rs").unwrap(),
4250 PathMatcher::new("*.odd").unwrap()
4251 ],
4252 )
4253 .unwrap(),
4254 cx
4255 )
4256 .await
4257 .unwrap(),
4258 HashMap::from_iter([
4259 ("dir/one.ts".to_string(), vec![14..18]),
4260 ("dir/two.ts".to_string(), vec![14..18]),
4261 ]),
4262 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4263 );
4264}
4265
4266#[gpui::test]
4267async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4268 init_test(cx);
4269
4270 let fs = FakeFs::new(cx.executor());
4271 fs.insert_tree(
4272 "/worktree-a",
4273 json!({
4274 "haystack.rs": r#"// NEEDLE"#,
4275 "haystack.ts": r#"// NEEDLE"#,
4276 }),
4277 )
4278 .await;
4279 fs.insert_tree(
4280 "/worktree-b",
4281 json!({
4282 "haystack.rs": r#"// NEEDLE"#,
4283 "haystack.ts": r#"// NEEDLE"#,
4284 }),
4285 )
4286 .await;
4287
4288 let project = Project::test(
4289 fs.clone(),
4290 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4291 cx,
4292 )
4293 .await;
4294
4295 assert_eq!(
4296 search(
4297 &project,
4298 SearchQuery::text(
4299 "NEEDLE",
4300 false,
4301 true,
4302 false,
4303 vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
4304 Vec::new()
4305 )
4306 .unwrap(),
4307 cx
4308 )
4309 .await
4310 .unwrap(),
4311 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from the included worktree"
4313 );
4314 assert_eq!(
4315 search(
4316 &project,
4317 SearchQuery::text(
4318 "NEEDLE",
4319 false,
4320 true,
4321 false,
4322 vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
4323 Vec::new()
4324 )
4325 .unwrap(),
4326 cx
4327 )
4328 .await
4329 .unwrap(),
4330 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from the included worktree"
4332 );
4333
4334 assert_eq!(
4335 search(
4336 &project,
4337 SearchQuery::text(
4338 "NEEDLE",
4339 false,
4340 true,
4341 false,
4342 vec![PathMatcher::new("*.ts").unwrap()],
4343 Vec::new()
4344 )
4345 .unwrap(),
4346 cx
4347 )
4348 .await
4349 .unwrap(),
4350 HashMap::from_iter([
4351 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4352 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4353 ]),
4354 "should return results from both worktrees"
4355 );
4356}
4357
4358#[gpui::test]
4359async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4360 init_test(cx);
4361
4362 let fs = FakeFs::new(cx.background_executor.clone());
4363 fs.insert_tree(
4364 "/dir",
4365 json!({
4366 ".git": {},
4367 ".gitignore": "**/target\n/node_modules\n",
4368 "target": {
4369 "index.txt": "index_key:index_value"
4370 },
4371 "node_modules": {
4372 "eslint": {
4373 "index.ts": "const eslint_key = 'eslint value'",
4374 "package.json": r#"{ "some_key": "some value" }"#,
4375 },
4376 "prettier": {
4377 "index.ts": "const prettier_key = 'prettier value'",
4378 "package.json": r#"{ "other_key": "other value" }"#,
4379 },
4380 },
4381 "package.json": r#"{ "main_key": "main value" }"#,
4382 }),
4383 )
4384 .await;
4385 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4386
4387 let query = "key";
4388 assert_eq!(
4389 search(
4390 &project,
4391 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4392 cx
4393 )
4394 .await
4395 .unwrap(),
4396 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should match the query"
4398 );
4399
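    // Search again, this time including ignored entries, so matches inside
    // `target/` and `node_modules/` are returned as well.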
4400 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4401 assert_eq!(
4402 search(
4403 &project,
4404 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4405 cx
4406 )
4407 .await
4408 .unwrap(),
4409 HashMap::from_iter([
4410 ("dir/package.json".to_string(), vec![8..11]),
4411 ("dir/target/index.txt".to_string(), vec![6..9]),
4412 (
4413 "dir/node_modules/prettier/package.json".to_string(),
4414 vec![9..12]
4415 ),
4416 (
4417 "dir/node_modules/prettier/index.ts".to_string(),
4418 vec![15..18]
4419 ),
4420 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4421 (
4422 "dir/node_modules/eslint/package.json".to_string(),
4423 vec![8..11]
4424 ),
4425 ]),
        "Unrestricted search including ignored directories should find every file containing the query"
4427 );
4428
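    // Combine the ignored-files search with an inclusion for the ignored
    // prettier directory and an exclusion for TypeScript files.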
4429 let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
4430 let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
4431 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4432 assert_eq!(
4433 search(
4434 &project,
4435 SearchQuery::text(
4436 query,
4437 false,
4438 false,
4439 true,
4440 files_to_include,
4441 files_to_exclude,
4442 )
4443 .unwrap(),
4444 cx
4445 )
4446 .await
4447 .unwrap(),
4448 HashMap::from_iter([(
4449 "dir/node_modules/prettier/package.json".to_string(),
4450 vec![9..12]
4451 )]),
        "A search that includes the ignored prettier directory and excludes TS files should find only one file"
4453 );
4454}
4455
4456#[test]
4457fn test_glob_literal_prefix() {
4458 assert_eq!(glob_literal_prefix("**/*.js"), "");
4459 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4460 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4461 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4462}
4463
4464#[gpui::test]
4465async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4466 init_test(cx);
4467
4468 let fs = FakeFs::new(cx.executor().clone());
4469 fs.insert_tree(
4470 "/one/two",
4471 json!({
4472 "three": {
4473 "a.txt": "",
4474 "four": {}
4475 },
4476 "c.rs": ""
4477 }),
4478 )
4479 .await;
4480
4481 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
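    // "b.." is an ordinary file name, not a parent-directory reference, so
    // creating it should succeed.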
4482 project
4483 .update(cx, |project, cx| {
4484 let id = project.worktrees().next().unwrap().read(cx).id();
4485 project.create_entry((id, "b.."), true, cx)
4486 })
4487 .unwrap()
4488 .await
4489 .unwrap();
4490
4491 // Can't create paths outside the project
4492 let result = project
4493 .update(cx, |project, cx| {
4494 let id = project.worktrees().next().unwrap().read(cx).id();
4495 project.create_entry((id, "../../boop"), true, cx)
4496 })
4497 .await;
4498 assert!(result.is_err());
4499
4500 // Can't create paths with '..'
4501 let result = project
4502 .update(cx, |project, cx| {
4503 let id = project.worktrees().next().unwrap().read(cx).id();
4504 project.create_entry((id, "four/../beep"), true, cx)
4505 })
4506 .await;
4507 assert!(result.is_err());
4508
4509 assert_eq!(
4510 fs.paths(true),
4511 vec![
4512 PathBuf::from("/"),
4513 PathBuf::from("/one"),
4514 PathBuf::from("/one/two"),
4515 PathBuf::from("/one/two/c.rs"),
4516 PathBuf::from("/one/two/three"),
4517 PathBuf::from("/one/two/three/a.txt"),
4518 PathBuf::from("/one/two/three/b.."),
4519 PathBuf::from("/one/two/three/four"),
4520 ]
4521 );
4522
4523 // And we cannot open buffers with '..'
4524 let result = project
4525 .update(cx, |project, cx| {
4526 let id = project.worktrees().next().unwrap().read(cx).id();
4527 project.open_buffer((id, "../c.rs"), cx)
4528 })
4529 .await;
4530 assert!(result.is_err())
4531}
4532
4533#[gpui::test]
4534async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4535 init_test(cx);
4536
4537 let fs = FakeFs::new(cx.executor());
4538 fs.insert_tree(
4539 "/dir",
4540 json!({
4541 "a.tsx": "a",
4542 }),
4543 )
4544 .await;
4545
4546 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4547
4548 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4549 language_registry.add(tsx_lang());
4550 let language_server_names = [
4551 "TypeScriptServer",
4552 "TailwindServer",
4553 "ESLintServer",
4554 "NoHoverCapabilitiesServer",
4555 ];
4556 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4557 "tsx",
4558 true,
4559 FakeLspAdapter {
4560 name: &language_server_names[0],
4561 capabilities: lsp::ServerCapabilities {
4562 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4563 ..lsp::ServerCapabilities::default()
4564 },
4565 ..FakeLspAdapter::default()
4566 },
4567 );
4568 let _a = language_registry.register_specific_fake_lsp_adapter(
4569 "tsx",
4570 false,
4571 FakeLspAdapter {
4572 name: &language_server_names[1],
4573 capabilities: lsp::ServerCapabilities {
4574 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4575 ..lsp::ServerCapabilities::default()
4576 },
4577 ..FakeLspAdapter::default()
4578 },
4579 );
4580 let _b = language_registry.register_specific_fake_lsp_adapter(
4581 "tsx",
4582 false,
4583 FakeLspAdapter {
4584 name: &language_server_names[2],
4585 capabilities: lsp::ServerCapabilities {
4586 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4587 ..lsp::ServerCapabilities::default()
4588 },
4589 ..FakeLspAdapter::default()
4590 },
4591 );
4592 let _c = language_registry.register_specific_fake_lsp_adapter(
4593 "tsx",
4594 false,
4595 FakeLspAdapter {
4596 name: &language_server_names[3],
4597 capabilities: lsp::ServerCapabilities {
4598 hover_provider: None,
4599 ..lsp::ServerCapabilities::default()
4600 },
4601 ..FakeLspAdapter::default()
4602 },
4603 );
4604
4605 let buffer = project
4606 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4607 .await
4608 .unwrap();
4609 cx.executor().run_until_parked();
4610
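    // Set up hover handlers for every fake server: two respond with hover text,
    // ESLintServer responds with `None`, and the server without hover
    // capabilities must never receive a hover request.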
4611 let mut servers_with_hover_requests = HashMap::default();
4612 for i in 0..language_server_names.len() {
4613 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4614 panic!(
4615 "Failed to get language server #{i} with name {}",
4616 &language_server_names[i]
4617 )
4618 });
4619 let new_server_name = new_server.server.name();
4620 assert!(
4621 !servers_with_hover_requests.contains_key(new_server_name),
4622 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4623 );
4624 let new_server_name = new_server_name.to_string();
4625 match new_server_name.as_str() {
4626 "TailwindServer" | "TypeScriptServer" => {
4627 servers_with_hover_requests.insert(
4628 new_server_name.clone(),
4629 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4630 let name = new_server_name.clone();
4631 async move {
4632 Ok(Some(lsp::Hover {
4633 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4634 format!("{name} hover"),
4635 )),
4636 range: None,
4637 }))
4638 }
4639 }),
4640 );
4641 }
4642 "ESLintServer" => {
4643 servers_with_hover_requests.insert(
4644 new_server_name,
4645 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4646 |_, _| async move { Ok(None) },
4647 ),
4648 );
4649 }
4650 "NoHoverCapabilitiesServer" => {
4651 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4652 |_, _| async move {
4653 panic!(
                            "Should not request hovers from a server without hover capabilities"
4655 )
4656 },
4657 );
4658 }
4659 unexpected => panic!("Unexpected server name: {unexpected}"),
4660 }
4661 }
4662
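    // Request a hover, then wait for every registered handler to observe its
    // request before collecting the combined results.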
4663 let hover_task = project.update(cx, |project, cx| {
4664 project.hover(&buffer, Point::new(0, 0), cx)
4665 });
4666 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4667 |mut hover_request| async move {
4668 hover_request
4669 .next()
4670 .await
4671 .expect("All hover requests should have been triggered")
4672 },
4673 ))
4674 .await;
4675 assert_eq!(
4676 vec!["TailwindServer hover", "TypeScriptServer hover"],
4677 hover_task
4678 .await
4679 .into_iter()
4680 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4681 .sorted()
4682 .collect::<Vec<_>>(),
4683 "Should receive hover responses from all related servers with hover capabilities"
4684 );
4685}
4686
4687#[gpui::test]
4688async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4689 init_test(cx);
4690
4691 let fs = FakeFs::new(cx.executor());
4692 fs.insert_tree(
4693 "/dir",
4694 json!({
4695 "a.ts": "a",
4696 }),
4697 )
4698 .await;
4699
4700 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4701
4702 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4703 language_registry.add(typescript_lang());
4704 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4705 "TypeScript",
4706 FakeLspAdapter {
4707 capabilities: lsp::ServerCapabilities {
4708 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4709 ..lsp::ServerCapabilities::default()
4710 },
4711 ..FakeLspAdapter::default()
4712 },
4713 );
4714
4715 let buffer = project
4716 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4717 .await
4718 .unwrap();
4719 cx.executor().run_until_parked();
4720
4721 let fake_server = fake_language_servers
4722 .next()
4723 .await
4724 .expect("failed to get the language server");
4725
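    // Respond with hover contents made up entirely of empty and whitespace-only strings.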
4726 let mut request_handled =
4727 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4728 Ok(Some(lsp::Hover {
4729 contents: lsp::HoverContents::Array(vec![
4730 lsp::MarkedString::String("".to_string()),
4731 lsp::MarkedString::String(" ".to_string()),
4732 lsp::MarkedString::String("\n\n\n".to_string()),
4733 ]),
4734 range: None,
4735 }))
4736 });
4737
4738 let hover_task = project.update(cx, |project, cx| {
4739 project.hover(&buffer, Point::new(0, 0), cx)
4740 });
4741 let () = request_handled
4742 .next()
4743 .await
4744 .expect("All hover requests should have been triggered");
4745 assert_eq!(
4746 Vec::<String>::new(),
4747 hover_task
4748 .await
4749 .into_iter()
4750 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4751 .sorted()
4752 .collect::<Vec<_>>(),
4753 "Empty hover parts should be ignored"
4754 );
4755}
4756
4757#[gpui::test]
4758async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4759 init_test(cx);
4760
4761 let fs = FakeFs::new(cx.executor());
4762 fs.insert_tree(
4763 "/dir",
4764 json!({
4765 "a.tsx": "a",
4766 }),
4767 )
4768 .await;
4769
4770 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4771
4772 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4773 language_registry.add(tsx_lang());
4774 let language_server_names = [
4775 "TypeScriptServer",
4776 "TailwindServer",
4777 "ESLintServer",
4778 "NoActionsCapabilitiesServer",
4779 ];
4780 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4781 "tsx",
4782 true,
4783 FakeLspAdapter {
4784 name: &language_server_names[0],
4785 capabilities: lsp::ServerCapabilities {
4786 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4787 ..lsp::ServerCapabilities::default()
4788 },
4789 ..FakeLspAdapter::default()
4790 },
4791 );
4792 let _a = language_registry.register_specific_fake_lsp_adapter(
4793 "tsx",
4794 false,
4795 FakeLspAdapter {
4796 name: &language_server_names[1],
4797 capabilities: lsp::ServerCapabilities {
4798 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4799 ..lsp::ServerCapabilities::default()
4800 },
4801 ..FakeLspAdapter::default()
4802 },
4803 );
4804 let _b = language_registry.register_specific_fake_lsp_adapter(
4805 "tsx",
4806 false,
4807 FakeLspAdapter {
4808 name: &language_server_names[2],
4809 capabilities: lsp::ServerCapabilities {
4810 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4811 ..lsp::ServerCapabilities::default()
4812 },
4813 ..FakeLspAdapter::default()
4814 },
4815 );
4816 let _c = language_registry.register_specific_fake_lsp_adapter(
4817 "tsx",
4818 false,
4819 FakeLspAdapter {
4820 name: &language_server_names[3],
4821 capabilities: lsp::ServerCapabilities {
4822 code_action_provider: None,
4823 ..lsp::ServerCapabilities::default()
4824 },
4825 ..FakeLspAdapter::default()
4826 },
4827 );
4828
4829 let buffer = project
4830 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4831 .await
4832 .unwrap();
4833 cx.executor().run_until_parked();
4834
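    // Set up code action handlers for every fake server: two respond with an
    // action, ESLintServer responds with `None`, and the server without code
    // action capabilities must never receive a code action request.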
4835 let mut servers_with_actions_requests = HashMap::default();
4836 for i in 0..language_server_names.len() {
4837 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4838 panic!(
4839 "Failed to get language server #{i} with name {}",
4840 &language_server_names[i]
4841 )
4842 });
4843 let new_server_name = new_server.server.name();
4844 assert!(
4845 !servers_with_actions_requests.contains_key(new_server_name),
4846 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4847 );
4848 let new_server_name = new_server_name.to_string();
4849 match new_server_name.as_str() {
4850 "TailwindServer" | "TypeScriptServer" => {
4851 servers_with_actions_requests.insert(
4852 new_server_name.clone(),
4853 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4854 move |_, _| {
4855 let name = new_server_name.clone();
4856 async move {
4857 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4858 lsp::CodeAction {
4859 title: format!("{name} code action"),
4860 ..lsp::CodeAction::default()
4861 },
4862 )]))
4863 }
4864 },
4865 ),
4866 );
4867 }
4868 "ESLintServer" => {
4869 servers_with_actions_requests.insert(
4870 new_server_name,
4871 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4872 |_, _| async move { Ok(None) },
4873 ),
4874 );
4875 }
4876 "NoActionsCapabilitiesServer" => {
4877 let _never_handled = new_server
4878 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4879 panic!(
                            "Should not request code actions from a server without code action capabilities"
4881 )
4882 });
4883 }
4884 unexpected => panic!("Unexpected server name: {unexpected}"),
4885 }
4886 }
4887
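    // Request code actions, then wait for every registered handler to observe
    // its request before collecting the combined results.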
4888 let code_actions_task = project.update(cx, |project, cx| {
4889 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4890 });
4891 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4892 |mut code_actions_request| async move {
4893 code_actions_request
4894 .next()
4895 .await
4896 .expect("All code actions requests should have been triggered")
4897 },
4898 ))
4899 .await;
4900 assert_eq!(
4901 vec!["TailwindServer code action", "TypeScriptServer code action"],
4902 code_actions_task
4903 .await
4904 .into_iter()
4905 .map(|code_action| code_action.lsp_action.title)
4906 .sorted()
4907 .collect::<Vec<_>>(),
        "Should receive code action responses from all related servers with code action capabilities"
4909 );
4910}
4911
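/// Runs `query` against `project` and collects the matches into a map from each
/// buffer's full path to the offset ranges of its matches.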
4912async fn search(
4913 project: &Model<Project>,
4914 query: SearchQuery,
4915 cx: &mut gpui::TestAppContext,
4916) -> Result<HashMap<String, Vec<Range<usize>>>> {
4917 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4918 let mut results = HashMap::default();
4919 while let Some(search_result) = search_rx.next().await {
4920 match search_result {
4921 SearchResult::Buffer { buffer, ranges } => {
4922 results.entry(buffer).or_insert(ranges);
4923 }
4924 SearchResult::LimitReached => {}
4925 }
4926 }
4927 Ok(results
4928 .into_iter()
4929 .map(|(buffer, ranges)| {
4930 buffer.update(cx, |buffer, cx| {
4931 let path = buffer
4932 .file()
4933 .unwrap()
4934 .full_path(cx)
4935 .to_string_lossy()
4936 .to_string();
4937 let ranges = ranges
4938 .into_iter()
4939 .map(|range| range.to_offset(buffer))
4940 .collect::<Vec<_>>();
4941 (path, ranges)
4942 })
4943 })
4944 .collect())
4945}
4946
4947fn init_test(cx: &mut gpui::TestAppContext) {
4948 if std::env::var("RUST_LOG").is_ok() {
4949 env_logger::try_init().ok();
4950 }
4951
4952 cx.update(|cx| {
4953 let settings_store = SettingsStore::test(cx);
4954 cx.set_global(settings_store);
4955 release_channel::init("0.0.0", cx);
4956 language::init(cx);
4957 Project::init_settings(cx);
4958 });
4959}
4960
4961fn json_lang() -> Arc<Language> {
4962 Arc::new(Language::new(
4963 LanguageConfig {
4964 name: "JSON".into(),
4965 matcher: LanguageMatcher {
4966 path_suffixes: vec!["json".to_string()],
4967 ..Default::default()
4968 },
4969 ..Default::default()
4970 },
4971 None,
4972 ))
4973}
4974
4975fn js_lang() -> Arc<Language> {
4976 Arc::new(Language::new(
4977 LanguageConfig {
4978 name: Arc::from("JavaScript"),
4979 matcher: LanguageMatcher {
4980 path_suffixes: vec!["js".to_string()],
4981 ..Default::default()
4982 },
4983 ..Default::default()
4984 },
4985 None,
4986 ))
4987}
4988
4989fn rust_lang() -> Arc<Language> {
4990 Arc::new(Language::new(
4991 LanguageConfig {
4992 name: "Rust".into(),
4993 matcher: LanguageMatcher {
4994 path_suffixes: vec!["rs".to_string()],
4995 ..Default::default()
4996 },
4997 ..Default::default()
4998 },
4999 Some(tree_sitter_rust::language()),
5000 ))
5001}
5002
5003fn typescript_lang() -> Arc<Language> {
5004 Arc::new(Language::new(
5005 LanguageConfig {
5006 name: "TypeScript".into(),
5007 matcher: LanguageMatcher {
5008 path_suffixes: vec!["ts".to_string()],
5009 ..Default::default()
5010 },
5011 ..Default::default()
5012 },
5013 Some(tree_sitter_typescript::language_typescript()),
5014 ))
5015}
5016
5017fn tsx_lang() -> Arc<Language> {
5018 Arc::new(Language::new(
5019 LanguageConfig {
5020 name: "tsx".into(),
5021 matcher: LanguageMatcher {
5022 path_suffixes: vec!["tsx".to_string()],
5023 ..Default::default()
5024 },
5025 ..Default::default()
5026 },
5027 Some(tree_sitter_typescript::language_tsx()),
5028 ))
5029}