1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::AppContext;
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::Url;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{TaskContext, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20use worktree::WorktreeModelHandle as _;
21
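// With parking allowed, the test executor should be able to await a message sent
// from a std thread that performs blocking filesystem and sleep calls.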
22#[gpui::test]
23async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
24 cx.executor().allow_parking();
25
26 let (tx, mut rx) = futures::channel::mpsc::unbounded();
27 let _thread = std::thread::spawn(move || {
28 std::fs::metadata("/Users").unwrap();
29 std::thread::sleep(Duration::from_millis(1000));
30 tx.unbounded_send(1).unwrap();
31 });
32 rx.next().await.unwrap();
33}
34
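// With parking allowed, a blocking closure offloaded via `smol::unblock` should be
// awaitable from a task spawned on the foreground executor.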
35#[gpui::test]
36async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
37 cx.executor().allow_parking();
38
39 let io_task = smol::unblock(move || {
40 println!("sleeping on thread {:?}", std::thread::current().id());
41 std::thread::sleep(Duration::from_millis(10));
42 1
43 });
44
45 let task = cx.foreground_executor().spawn(async move {
46 io_task.await;
47 });
48
49 task.await;
50}
51
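// A worktree opened through a symlinked root should resolve files through directory
// symlinks, so "fennel/grape" and "finnochio/grape" refer to the same inode.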
52#[cfg(not(windows))]
53#[gpui::test]
54async fn test_symlinks(cx: &mut gpui::TestAppContext) {
55 init_test(cx);
56 cx.executor().allow_parking();
57
58 let dir = temp_tree(json!({
59 "root": {
60 "apple": "",
61 "banana": {
62 "carrot": {
63 "date": "",
64 "endive": "",
65 }
66 },
67 "fennel": {
68 "grape": "",
69 }
70 }
71 }));
72
73 let root_link_path = dir.path().join("root_link");
74 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
75 os::unix::fs::symlink(
76 &dir.path().join("root/fennel"),
77 &dir.path().join("root/finnochio"),
78 )
79 .unwrap();
80
81 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
82
83 project.update(cx, |project, cx| {
84 let tree = project.worktrees().next().unwrap().read(cx);
85 assert_eq!(tree.file_count(), 5);
86 assert_eq!(
87 tree.inode_for_path("fennel/grape"),
88 tree.inode_for_path("finnochio/grape")
89 );
90 });
91}
92
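// Per-directory .zed/settings.json and .zed/tasks.json files should be honored, with
// nested directories overriding the worktree root, and task sources should be
// replaceable at runtime.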
93#[gpui::test]
94async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
95 init_test(cx);
96
97 let fs = FakeFs::new(cx.executor());
98 fs.insert_tree(
99 "/the-root",
100 json!({
101 ".zed": {
102 "settings.json": r#"{ "tab_size": 8 }"#,
103 "tasks.json": r#"[{
104 "label": "cargo check",
105 "command": "cargo",
106 "args": ["check", "--all"]
107 },]"#,
108 },
109 "a": {
110 "a.rs": "fn a() {\n A\n}"
111 },
112 "b": {
113 ".zed": {
114 "settings.json": r#"{ "tab_size": 2 }"#,
115 "tasks.json": r#"[{
116 "label": "cargo check",
117 "command": "cargo",
118 "args": ["check"]
119 },]"#,
120 },
121 "b.rs": "fn b() {\n B\n}"
122 }
123 }),
124 )
125 .await;
126
127 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
128 let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
129 let task_context = TaskContext::default();
130
131 cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
133 project.update(cx, |project, cx| {
134 project.worktrees().next().unwrap().read(cx).id()
135 })
136 });
137 let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
139 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
140 id_base: "local_tasks_for_worktree",
141 };
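    // Both settings files should apply (the nested one overrides tab_size), and both
    // tasks.json files should contribute a "cargo check" template.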
142 cx.update(|cx| {
143 let tree = worktree.read(cx);
144
145 let settings_a = language_settings(
146 None,
147 Some(
148 &(File::for_entry(
149 tree.entry_for_path("a/a.rs").unwrap().clone(),
150 worktree.clone(),
151 ) as _),
152 ),
153 cx,
154 );
155 let settings_b = language_settings(
156 None,
157 Some(
158 &(File::for_entry(
159 tree.entry_for_path("b/b.rs").unwrap().clone(),
160 worktree.clone(),
161 ) as _),
162 ),
163 cx,
164 );
165
166 assert_eq!(settings_a.tab_size.get(), 8);
167 assert_eq!(settings_b.tab_size.get(), 2);
168
169 let all_tasks = project
170 .update(cx, |project, cx| {
171 project.task_inventory().update(cx, |inventory, _| {
172 let (mut old, new) = inventory.used_and_current_resolved_tasks(
173 None,
                        Some(worktree_id),
175 &task_context,
176 );
177 old.extend(new);
178 old
179 })
180 })
181 .into_iter()
182 .map(|(source_kind, task)| {
183 let resolved = task.resolved.unwrap();
184 (
185 source_kind,
186 task.resolved_label,
187 resolved.args,
188 resolved.env,
189 )
190 })
191 .collect::<Vec<_>>();
192 assert_eq!(
193 all_tasks,
194 vec![
195 (
196 global_task_source_kind.clone(),
197 "cargo check".to_string(),
198 vec!["check".to_string(), "--all".to_string()],
199 HashMap::default(),
200 ),
201 (
202 TaskSourceKind::Worktree {
                        id: worktree_id,
204 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
205 id_base: "local_tasks_for_worktree",
206 },
207 "cargo check".to_string(),
208 vec!["check".to_string()],
209 HashMap::default(),
210 ),
211 ]
212 );
213 });
214
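    // Schedule the root worktree's task so the inventory records it as recently used.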
215 project.update(cx, |project, cx| {
216 let inventory = project.task_inventory();
217 inventory.update(cx, |inventory, _| {
218 let (mut old, new) =
                inventory.used_and_current_resolved_tasks(None, Some(worktree_id), &task_context);
220 old.extend(new);
221 let (_, resolved_task) = old
222 .into_iter()
223 .find(|(source_kind, _)| source_kind == &global_task_source_kind)
224 .expect("should have one global task");
225 inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
226 })
227 });
228
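    // Replace the file-based task source for the worktree root with an in-memory
    // static source that provides an updated "cargo check" template.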
229 let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
230 label: "cargo check".to_string(),
231 command: "cargo".to_string(),
232 args: vec![
233 "check".to_string(),
234 "--all".to_string(),
235 "--all-targets".to_string(),
236 ],
237 env: HashMap::from_iter(Some((
238 "RUSTFLAGS".to_string(),
239 "-Zunstable-options".to_string(),
240 ))),
241 ..TaskTemplate::default()
242 }]))
243 .unwrap();
244 let (tx, rx) = futures::channel::mpsc::unbounded();
245
246 let templates = cx.update(|cx| TrackedFile::new(rx, cx));
247 tx.unbounded_send(tasks).unwrap();
248
249 let source = StaticSource::new(templates);
250 cx.run_until_parked();
251
252 cx.update(|cx| {
253 let all_tasks = project
254 .update(cx, |project, cx| {
255 project.task_inventory().update(cx, |inventory, cx| {
256 inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
257 inventory.add_source(global_task_source_kind.clone(), source, cx);
258 let (mut old, new) = inventory.used_and_current_resolved_tasks(
259 None,
                        Some(worktree_id),
261 &task_context,
262 );
263 old.extend(new);
264 old
265 })
266 })
267 .into_iter()
268 .map(|(source_kind, task)| {
269 let resolved = task.resolved.unwrap();
270 (
271 source_kind,
272 task.resolved_label,
273 resolved.args,
274 resolved.env,
275 )
276 })
277 .collect::<Vec<_>>();
278 assert_eq!(
279 all_tasks,
280 vec![
281 (
282 TaskSourceKind::Worktree {
                        id: worktree_id,
284 abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
285 id_base: "local_tasks_for_worktree",
286 },
287 "cargo check".to_string(),
288 vec![
289 "check".to_string(),
290 "--all".to_string(),
291 "--all-targets".to_string()
292 ],
293 HashMap::from_iter(Some((
294 "RUSTFLAGS".to_string(),
295 "-Zunstable-options".to_string()
296 ))),
297 ),
298 (
299 TaskSourceKind::Worktree {
                        id: worktree_id,
301 abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
302 id_base: "local_tasks_for_worktree",
303 },
304 "cargo check".to_string(),
305 vec!["check".to_string()],
306 HashMap::default(),
307 ),
308 ]
309 );
310 });
311}
312
313#[gpui::test]
314async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
315 init_test(cx);
316
317 let fs = FakeFs::new(cx.executor());
318 fs.insert_tree(
319 "/the-root",
320 json!({
321 "test.rs": "const A: i32 = 1;",
322 "test2.rs": "",
323 "Cargo.toml": "a = 1",
324 "package.json": "{\"a\": 1}",
325 }),
326 )
327 .await;
328
329 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
330 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
331
332 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
333 "Rust",
334 FakeLspAdapter {
335 name: "the-rust-language-server",
336 capabilities: lsp::ServerCapabilities {
337 completion_provider: Some(lsp::CompletionOptions {
338 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
339 ..Default::default()
340 }),
341 ..Default::default()
342 },
343 ..Default::default()
344 },
345 );
346 let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
347 "JSON",
348 FakeLspAdapter {
349 name: "the-json-language-server",
350 capabilities: lsp::ServerCapabilities {
351 completion_provider: Some(lsp::CompletionOptions {
352 trigger_characters: Some(vec![":".to_string()]),
353 ..Default::default()
354 }),
355 ..Default::default()
356 },
357 ..Default::default()
358 },
359 );
360
361 // Open a buffer without an associated language server.
362 let toml_buffer = project
363 .update(cx, |project, cx| {
364 project.open_local_buffer("/the-root/Cargo.toml", cx)
365 })
366 .await
367 .unwrap();
368
369 // Open a buffer with an associated language server before the language for it has been loaded.
370 let rust_buffer = project
371 .update(cx, |project, cx| {
372 project.open_local_buffer("/the-root/test.rs", cx)
373 })
374 .await
375 .unwrap();
376 rust_buffer.update(cx, |buffer, _| {
377 assert_eq!(buffer.language().map(|l| l.name()), None);
378 });
379
380 // Now we add the languages to the project, and ensure they get assigned to all
381 // the relevant open buffers.
382 language_registry.add(json_lang());
383 language_registry.add(rust_lang());
384 cx.executor().run_until_parked();
385 rust_buffer.update(cx, |buffer, _| {
386 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
387 });
388
389 // A server is started up, and it is notified about Rust files.
390 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
391 assert_eq!(
392 fake_rust_server
393 .receive_notification::<lsp::notification::DidOpenTextDocument>()
394 .await
395 .text_document,
396 lsp::TextDocumentItem {
397 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
398 version: 0,
399 text: "const A: i32 = 1;".to_string(),
400 language_id: "rust".to_string(),
401 }
402 );
403
404 // The buffer is configured based on the language server's capabilities.
405 rust_buffer.update(cx, |buffer, _| {
406 assert_eq!(
407 buffer.completion_triggers(),
408 &[".".to_string(), "::".to_string()]
409 );
410 });
411 toml_buffer.update(cx, |buffer, _| {
412 assert!(buffer.completion_triggers().is_empty());
413 });
414
415 // Edit a buffer. The changes are reported to the language server.
416 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
417 assert_eq!(
418 fake_rust_server
419 .receive_notification::<lsp::notification::DidChangeTextDocument>()
420 .await
421 .text_document,
422 lsp::VersionedTextDocumentIdentifier::new(
423 lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
424 1
425 )
426 );
427
428 // Open a third buffer with a different associated language server.
429 let json_buffer = project
430 .update(cx, |project, cx| {
431 project.open_local_buffer("/the-root/package.json", cx)
432 })
433 .await
434 .unwrap();
435
436 // A json language server is started up and is only notified about the json buffer.
437 let mut fake_json_server = fake_json_servers.next().await.unwrap();
438 assert_eq!(
439 fake_json_server
440 .receive_notification::<lsp::notification::DidOpenTextDocument>()
441 .await
442 .text_document,
443 lsp::TextDocumentItem {
444 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
445 version: 0,
446 text: "{\"a\": 1}".to_string(),
447 language_id: "json".to_string(),
448 }
449 );
450
451 // This buffer is configured based on the second language server's
452 // capabilities.
453 json_buffer.update(cx, |buffer, _| {
454 assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
455 });
456
457 // When opening another buffer whose language server is already running,
458 // it is also configured based on the existing language server's capabilities.
459 let rust_buffer2 = project
460 .update(cx, |project, cx| {
461 project.open_local_buffer("/the-root/test2.rs", cx)
462 })
463 .await
464 .unwrap();
465 rust_buffer2.update(cx, |buffer, _| {
466 assert_eq!(
467 buffer.completion_triggers(),
468 &[".".to_string(), "::".to_string()]
469 );
470 });
471
472 // Changes are reported only to servers matching the buffer's language.
473 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
474 rust_buffer2.update(cx, |buffer, cx| {
475 buffer.edit([(0..0, "let x = 1;")], None, cx)
476 });
477 assert_eq!(
478 fake_rust_server
479 .receive_notification::<lsp::notification::DidChangeTextDocument>()
480 .await
481 .text_document,
482 lsp::VersionedTextDocumentIdentifier::new(
483 lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
484 1
485 )
486 );
487
488 // Save notifications are reported to all servers.
489 project
490 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
491 .await
492 .unwrap();
493 assert_eq!(
494 fake_rust_server
495 .receive_notification::<lsp::notification::DidSaveTextDocument>()
496 .await
497 .text_document,
498 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
499 );
500 assert_eq!(
501 fake_json_server
502 .receive_notification::<lsp::notification::DidSaveTextDocument>()
503 .await
504 .text_document,
505 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
506 );
507
508 // Renames are reported only to servers matching the buffer's language.
509 fs.rename(
510 Path::new("/the-root/test2.rs"),
511 Path::new("/the-root/test3.rs"),
512 Default::default(),
513 )
514 .await
515 .unwrap();
516 assert_eq!(
517 fake_rust_server
518 .receive_notification::<lsp::notification::DidCloseTextDocument>()
519 .await
520 .text_document,
521 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
522 );
523 assert_eq!(
524 fake_rust_server
525 .receive_notification::<lsp::notification::DidOpenTextDocument>()
526 .await
527 .text_document,
528 lsp::TextDocumentItem {
529 uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
530 version: 0,
531 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
532 language_id: "rust".to_string(),
533 },
534 );
535
536 rust_buffer2.update(cx, |buffer, cx| {
537 buffer.update_diagnostics(
538 LanguageServerId(0),
539 DiagnosticSet::from_sorted_entries(
540 vec![DiagnosticEntry {
541 diagnostic: Default::default(),
542 range: Anchor::MIN..Anchor::MAX,
543 }],
544 &buffer.snapshot(),
545 ),
546 cx,
547 );
548 assert_eq!(
549 buffer
550 .snapshot()
551 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
552 .count(),
553 1
554 );
555 });
556
557 // When the rename changes the extension of the file, the buffer gets closed on the old
558 // language server and gets opened on the new one.
559 fs.rename(
560 Path::new("/the-root/test3.rs"),
561 Path::new("/the-root/test3.json"),
562 Default::default(),
563 )
564 .await
565 .unwrap();
566 assert_eq!(
567 fake_rust_server
568 .receive_notification::<lsp::notification::DidCloseTextDocument>()
569 .await
570 .text_document,
571 lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
572 );
573 assert_eq!(
574 fake_json_server
575 .receive_notification::<lsp::notification::DidOpenTextDocument>()
576 .await
577 .text_document,
578 lsp::TextDocumentItem {
579 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
580 version: 0,
581 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
582 language_id: "json".to_string(),
583 },
584 );
585
586 // We clear the diagnostics, since the language has changed.
587 rust_buffer2.update(cx, |buffer, _| {
588 assert_eq!(
589 buffer
590 .snapshot()
591 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
592 .count(),
593 0
594 );
595 });
596
597 // The renamed file's version resets after changing language server.
598 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
599 assert_eq!(
600 fake_json_server
601 .receive_notification::<lsp::notification::DidChangeTextDocument>()
602 .await
603 .text_document,
604 lsp::VersionedTextDocumentIdentifier::new(
605 lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
606 1
607 )
608 );
609
610 // Restart language servers
611 project.update(cx, |project, cx| {
612 project.restart_language_servers_for_buffers(
613 vec![rust_buffer.clone(), json_buffer.clone()],
614 cx,
615 );
616 });
617
618 let mut rust_shutdown_requests = fake_rust_server
619 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
620 let mut json_shutdown_requests = fake_json_server
621 .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
622 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
623
624 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
625 let mut fake_json_server = fake_json_servers.next().await.unwrap();
626
    // Ensure the Rust document is reopened in the new Rust language server.
628 assert_eq!(
629 fake_rust_server
630 .receive_notification::<lsp::notification::DidOpenTextDocument>()
631 .await
632 .text_document,
633 lsp::TextDocumentItem {
634 uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
635 version: 0,
636 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
637 language_id: "rust".to_string(),
638 }
639 );
640
    // Ensure the JSON documents are reopened in the new JSON language server.
642 assert_set_eq!(
643 [
644 fake_json_server
645 .receive_notification::<lsp::notification::DidOpenTextDocument>()
646 .await
647 .text_document,
648 fake_json_server
649 .receive_notification::<lsp::notification::DidOpenTextDocument>()
650 .await
651 .text_document,
652 ],
653 [
654 lsp::TextDocumentItem {
655 uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
656 version: 0,
657 text: json_buffer.update(cx, |buffer, _| buffer.text()),
658 language_id: "json".to_string(),
659 },
660 lsp::TextDocumentItem {
661 uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
662 version: 0,
663 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
664 language_id: "json".to_string(),
665 }
666 ]
667 );
668
669 // Close notifications are reported only to servers matching the buffer's language.
670 cx.update(|_| drop(json_buffer));
671 let close_message = lsp::DidCloseTextDocumentParams {
672 text_document: lsp::TextDocumentIdentifier::new(
673 lsp::Url::from_file_path("/the-root/package.json").unwrap(),
674 ),
675 };
676 assert_eq!(
677 fake_json_server
678 .receive_notification::<lsp::notification::DidCloseTextDocument>()
679 .await,
680 close_message,
681 );
682}
683
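// File watchers registered by a language server should cause matching ignored
// directories to be loaded, and only matching FS events should be forwarded to it.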
684#[gpui::test]
685async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
686 init_test(cx);
687
688 let fs = FakeFs::new(cx.executor());
689 fs.insert_tree(
690 "/the-root",
691 json!({
692 ".gitignore": "target\n",
693 "src": {
694 "a.rs": "",
695 "b.rs": "",
696 },
697 "target": {
698 "x": {
699 "out": {
700 "x.rs": ""
701 }
702 },
703 "y": {
704 "out": {
705 "y.rs": "",
706 }
707 },
708 "z": {
709 "out": {
710 "z.rs": ""
711 }
712 }
713 }
714 }),
715 )
716 .await;
717
718 let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
719 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
720 language_registry.add(rust_lang());
721 let mut fake_servers = language_registry.register_fake_lsp_adapter(
722 "Rust",
723 FakeLspAdapter {
724 name: "the-language-server",
725 ..Default::default()
726 },
727 );
728
729 cx.executor().run_until_parked();
730
731 // Start the language server by opening a buffer with a compatible file extension.
732 let _buffer = project
733 .update(cx, |project, cx| {
734 project.open_local_buffer("/the-root/src/a.rs", cx)
735 })
736 .await
737 .unwrap();
738
739 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
740 project.update(cx, |project, cx| {
741 let worktree = project.worktrees().next().unwrap();
742 assert_eq!(
743 worktree
744 .read(cx)
745 .snapshot()
746 .entries(true)
747 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
748 .collect::<Vec<_>>(),
749 &[
750 (Path::new(""), false),
751 (Path::new(".gitignore"), false),
752 (Path::new("src"), false),
753 (Path::new("src/a.rs"), false),
754 (Path::new("src/b.rs"), false),
755 (Path::new("target"), true),
756 ]
757 );
758 });
759
760 let prev_read_dir_count = fs.read_dir_call_count();
761
762 // Keep track of the FS events reported to the language server.
763 let fake_server = fake_servers.next().await.unwrap();
764 let file_changes = Arc::new(Mutex::new(Vec::new()));
765 fake_server
766 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
767 registrations: vec![lsp::Registration {
768 id: Default::default(),
769 method: "workspace/didChangeWatchedFiles".to_string(),
770 register_options: serde_json::to_value(
771 lsp::DidChangeWatchedFilesRegistrationOptions {
772 watchers: vec![
773 lsp::FileSystemWatcher {
774 glob_pattern: lsp::GlobPattern::String(
775 "/the-root/Cargo.toml".to_string(),
776 ),
777 kind: None,
778 },
779 lsp::FileSystemWatcher {
780 glob_pattern: lsp::GlobPattern::String(
781 "/the-root/src/*.{rs,c}".to_string(),
782 ),
783 kind: None,
784 },
785 lsp::FileSystemWatcher {
786 glob_pattern: lsp::GlobPattern::String(
787 "/the-root/target/y/**/*.rs".to_string(),
788 ),
789 kind: None,
790 },
791 ],
792 },
793 )
794 .ok(),
795 }],
796 })
797 .await
798 .unwrap();
799 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
800 let file_changes = file_changes.clone();
801 move |params, _| {
802 let mut file_changes = file_changes.lock();
803 file_changes.extend(params.changes);
804 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
805 }
806 });
807
808 cx.executor().run_until_parked();
809 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
810 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
811
812 // Now the language server has asked us to watch an ignored directory path,
813 // so we recursively load it.
814 project.update(cx, |project, cx| {
815 let worktree = project.worktrees().next().unwrap();
816 assert_eq!(
817 worktree
818 .read(cx)
819 .snapshot()
820 .entries(true)
821 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
822 .collect::<Vec<_>>(),
823 &[
824 (Path::new(""), false),
825 (Path::new(".gitignore"), false),
826 (Path::new("src"), false),
827 (Path::new("src/a.rs"), false),
828 (Path::new("src/b.rs"), false),
829 (Path::new("target"), true),
830 (Path::new("target/x"), true),
831 (Path::new("target/y"), true),
832 (Path::new("target/y/out"), true),
833 (Path::new("target/y/out/y.rs"), true),
834 (Path::new("target/z"), true),
835 ]
836 );
837 });
838
    // Perform some file system mutations, three of which match the watched patterns,
    // and two of which do not.
841 fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
842 .await
843 .unwrap();
844 fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
845 .await
846 .unwrap();
847 fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
848 .await
849 .unwrap();
850 fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
851 .await
852 .unwrap();
853 fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
854 .await
855 .unwrap();
856
857 // The language server receives events for the FS mutations that match its watch patterns.
858 cx.executor().run_until_parked();
859 assert_eq!(
860 &*file_changes.lock(),
861 &[
862 lsp::FileEvent {
863 uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
864 typ: lsp::FileChangeType::DELETED,
865 },
866 lsp::FileEvent {
867 uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
868 typ: lsp::FileChangeType::CREATED,
869 },
870 lsp::FileEvent {
871 uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
872 typ: lsp::FileChangeType::CREATED,
873 },
874 ]
875 );
876}
877
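// Diagnostics published for buffers in single-file worktrees should be applied to
// those buffers.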
878#[gpui::test]
879async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
880 init_test(cx);
881
882 let fs = FakeFs::new(cx.executor());
883 fs.insert_tree(
884 "/dir",
885 json!({
886 "a.rs": "let a = 1;",
887 "b.rs": "let b = 2;"
888 }),
889 )
890 .await;
891
892 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
893
894 let buffer_a = project
895 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
896 .await
897 .unwrap();
898 let buffer_b = project
899 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
900 .await
901 .unwrap();
902
903 project.update(cx, |project, cx| {
904 project
905 .update_diagnostics(
906 LanguageServerId(0),
907 lsp::PublishDiagnosticsParams {
908 uri: Url::from_file_path("/dir/a.rs").unwrap(),
909 version: None,
910 diagnostics: vec![lsp::Diagnostic {
911 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
912 severity: Some(lsp::DiagnosticSeverity::ERROR),
913 message: "error 1".to_string(),
914 ..Default::default()
915 }],
916 },
917 &[],
918 cx,
919 )
920 .unwrap();
921 project
922 .update_diagnostics(
923 LanguageServerId(0),
924 lsp::PublishDiagnosticsParams {
925 uri: Url::from_file_path("/dir/b.rs").unwrap(),
926 version: None,
927 diagnostics: vec![lsp::Diagnostic {
928 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
929 severity: Some(lsp::DiagnosticSeverity::WARNING),
930 message: "error 2".to_string(),
931 ..Default::default()
932 }],
933 },
934 &[],
935 cx,
936 )
937 .unwrap();
938 });
939
940 buffer_a.update(cx, |buffer, _| {
941 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
942 assert_eq!(
943 chunks
944 .iter()
945 .map(|(s, d)| (s.as_str(), *d))
946 .collect::<Vec<_>>(),
947 &[
948 ("let ", None),
949 ("a", Some(DiagnosticSeverity::ERROR)),
950 (" = 1;", None),
951 ]
952 );
953 });
954 buffer_b.update(cx, |buffer, _| {
955 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
956 assert_eq!(
957 chunks
958 .iter()
959 .map(|(s, d)| (s.as_str(), *d))
960 .collect::<Vec<_>>(),
961 &[
962 ("let ", None),
963 ("b", Some(DiagnosticSeverity::WARNING)),
964 (" = 2;", None),
965 ]
966 );
967 });
968}
969
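// Diagnostics in gitignored files and in non-visible worktrees should still appear
// in their buffers, but should be omitted from project summaries unless requested.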
970#[gpui::test]
971async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
972 init_test(cx);
973
974 let fs = FakeFs::new(cx.executor());
975 fs.insert_tree(
976 "/root",
977 json!({
978 "dir": {
979 ".git": {
980 "HEAD": "ref: refs/heads/main",
981 },
982 ".gitignore": "b.rs",
983 "a.rs": "let a = 1;",
984 "b.rs": "let b = 2;",
985 },
986 "other.rs": "let b = c;"
987 }),
988 )
989 .await;
990
991 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
992 let (worktree, _) = project
993 .update(cx, |project, cx| {
994 project.find_or_create_local_worktree("/root/dir", true, cx)
995 })
996 .await
997 .unwrap();
998 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
999
1000 let (worktree, _) = project
1001 .update(cx, |project, cx| {
1002 project.find_or_create_local_worktree("/root/other.rs", false, cx)
1003 })
1004 .await
1005 .unwrap();
1006 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1007
1008 let server_id = LanguageServerId(0);
1009 project.update(cx, |project, cx| {
1010 project
1011 .update_diagnostics(
1012 server_id,
1013 lsp::PublishDiagnosticsParams {
1014 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1015 version: None,
1016 diagnostics: vec![lsp::Diagnostic {
1017 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1018 severity: Some(lsp::DiagnosticSeverity::ERROR),
1019 message: "unused variable 'b'".to_string(),
1020 ..Default::default()
1021 }],
1022 },
1023 &[],
1024 cx,
1025 )
1026 .unwrap();
1027 project
1028 .update_diagnostics(
1029 server_id,
1030 lsp::PublishDiagnosticsParams {
1031 uri: Url::from_file_path("/root/other.rs").unwrap(),
1032 version: None,
1033 diagnostics: vec![lsp::Diagnostic {
1034 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1035 severity: Some(lsp::DiagnosticSeverity::ERROR),
1036 message: "unknown variable 'c'".to_string(),
1037 ..Default::default()
1038 }],
1039 },
1040 &[],
1041 cx,
1042 )
1043 .unwrap();
1044 });
1045
1046 let main_ignored_buffer = project
1047 .update(cx, |project, cx| {
1048 project.open_buffer((main_worktree_id, "b.rs"), cx)
1049 })
1050 .await
1051 .unwrap();
1052 main_ignored_buffer.update(cx, |buffer, _| {
1053 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1054 assert_eq!(
1055 chunks
1056 .iter()
1057 .map(|(s, d)| (s.as_str(), *d))
1058 .collect::<Vec<_>>(),
1059 &[
1060 ("let ", None),
1061 ("b", Some(DiagnosticSeverity::ERROR)),
1062 (" = 2;", None),
1063 ],
            "Gitignored buffers should still get in-buffer diagnostics",
1065 );
1066 });
1067 let other_buffer = project
1068 .update(cx, |project, cx| {
1069 project.open_buffer((other_worktree_id, ""), cx)
1070 })
1071 .await
1072 .unwrap();
1073 other_buffer.update(cx, |buffer, _| {
1074 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1075 assert_eq!(
1076 chunks
1077 .iter()
1078 .map(|(s, d)| (s.as_str(), *d))
1079 .collect::<Vec<_>>(),
1080 &[
1081 ("let b = ", None),
1082 ("c", Some(DiagnosticSeverity::ERROR)),
1083 (";", None),
1084 ],
            "Buffers from hidden worktrees should still get in-buffer diagnostics"
1086 );
1087 });
1088
1089 project.update(cx, |project, cx| {
1090 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1091 assert_eq!(
1092 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1093 vec![(
1094 ProjectPath {
1095 worktree_id: main_worktree_id,
1096 path: Arc::from(Path::new("b.rs")),
1097 },
1098 server_id,
1099 DiagnosticSummary {
1100 error_count: 1,
1101 warning_count: 0,
1102 }
1103 )]
1104 );
1105 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1106 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1107 });
1108}
1109
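// Progress reported under the disk-based diagnostics token should produce the
// corresponding started/updated/finished project events.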
1110#[gpui::test]
1111async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
1112 init_test(cx);
1113
1114 let progress_token = "the-progress-token";
1115
1116 let fs = FakeFs::new(cx.executor());
1117 fs.insert_tree(
1118 "/dir",
1119 json!({
1120 "a.rs": "fn a() { A }",
1121 "b.rs": "const y: i32 = 1",
1122 }),
1123 )
1124 .await;
1125
1126 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1127 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1128
1129 language_registry.add(rust_lang());
1130 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1131 "Rust",
1132 FakeLspAdapter {
1133 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1134 disk_based_diagnostics_sources: vec!["disk".into()],
1135 ..Default::default()
1136 },
1137 );
1138
1139 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1140
    // Cause the worktree to start the fake language server.
1142 let _buffer = project
1143 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1144 .await
1145 .unwrap();
1146
1147 let mut events = cx.events(&project);
1148
1149 let fake_server = fake_servers.next().await.unwrap();
1150 assert_eq!(
1151 events.next().await.unwrap(),
1152 Event::LanguageServerAdded(LanguageServerId(0)),
1153 );
1154
1155 fake_server
1156 .start_progress(format!("{}/0", progress_token))
1157 .await;
1158 assert_eq!(
1159 events.next().await.unwrap(),
1160 Event::DiskBasedDiagnosticsStarted {
1161 language_server_id: LanguageServerId(0),
1162 }
1163 );
1164
1165 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1166 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1167 version: None,
1168 diagnostics: vec![lsp::Diagnostic {
1169 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1170 severity: Some(lsp::DiagnosticSeverity::ERROR),
1171 message: "undefined variable 'A'".to_string(),
1172 ..Default::default()
1173 }],
1174 });
1175 assert_eq!(
1176 events.next().await.unwrap(),
1177 Event::DiagnosticsUpdated {
1178 language_server_id: LanguageServerId(0),
1179 path: (worktree_id, Path::new("a.rs")).into()
1180 }
1181 );
1182
1183 fake_server.end_progress(format!("{}/0", progress_token));
1184 assert_eq!(
1185 events.next().await.unwrap(),
1186 Event::DiskBasedDiagnosticsFinished {
1187 language_server_id: LanguageServerId(0)
1188 }
1189 );
1190
1191 let buffer = project
1192 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1193 .await
1194 .unwrap();
1195
1196 buffer.update(cx, |buffer, _| {
1197 let snapshot = buffer.snapshot();
1198 let diagnostics = snapshot
1199 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1200 .collect::<Vec<_>>();
1201 assert_eq!(
1202 diagnostics,
1203 &[DiagnosticEntry {
1204 range: Point::new(0, 9)..Point::new(0, 10),
1205 diagnostic: Diagnostic {
1206 severity: lsp::DiagnosticSeverity::ERROR,
1207 message: "undefined variable 'A'".to_string(),
1208 group_id: 0,
1209 is_primary: true,
1210 ..Default::default()
1211 }
1212 }]
1213 )
1214 });
1215
1216 // Ensure publishing empty diagnostics twice only results in one update event.
1217 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1218 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1219 version: None,
1220 diagnostics: Default::default(),
1221 });
1222 assert_eq!(
1223 events.next().await.unwrap(),
1224 Event::DiagnosticsUpdated {
1225 language_server_id: LanguageServerId(0),
1226 path: (worktree_id, Path::new("a.rs")).into()
1227 }
1228 );
1229
1230 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1231 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1232 version: None,
1233 diagnostics: Default::default(),
1234 });
1235 cx.executor().run_until_parked();
1236 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1237}
1238
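// Restarting a language server while its disk-based diagnostics are still running
// should not leave the project reporting an in-progress diagnostics task.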
1239#[gpui::test]
1240async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1241 init_test(cx);
1242
1243 let progress_token = "the-progress-token";
1244
1245 let fs = FakeFs::new(cx.executor());
1246 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1247
1248 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1249
1250 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1251 language_registry.add(rust_lang());
1252 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1253 "Rust",
1254 FakeLspAdapter {
1255 name: "the-language-server",
1256 disk_based_diagnostics_sources: vec!["disk".into()],
1257 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1258 ..Default::default()
1259 },
1260 );
1261
1262 let buffer = project
1263 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1264 .await
1265 .unwrap();
1266
1267 // Simulate diagnostics starting to update.
1268 let fake_server = fake_servers.next().await.unwrap();
1269 fake_server.start_progress(progress_token).await;
1270
1271 // Restart the server before the diagnostics finish updating.
1272 project.update(cx, |project, cx| {
1273 project.restart_language_servers_for_buffers([buffer], cx);
1274 });
1275 let mut events = cx.events(&project);
1276
1277 // Simulate the newly started server sending more diagnostics.
1278 let fake_server = fake_servers.next().await.unwrap();
1279 assert_eq!(
1280 events.next().await.unwrap(),
1281 Event::LanguageServerAdded(LanguageServerId(1))
1282 );
1283 fake_server.start_progress(progress_token).await;
1284 assert_eq!(
1285 events.next().await.unwrap(),
1286 Event::DiskBasedDiagnosticsStarted {
1287 language_server_id: LanguageServerId(1)
1288 }
1289 );
1290 project.update(cx, |project, _| {
1291 assert_eq!(
1292 project
1293 .language_servers_running_disk_based_diagnostics()
1294 .collect::<Vec<_>>(),
1295 [LanguageServerId(1)]
1296 );
1297 });
1298
1299 // All diagnostics are considered done, despite the old server's diagnostic
1300 // task never completing.
1301 fake_server.end_progress(progress_token);
1302 assert_eq!(
1303 events.next().await.unwrap(),
1304 Event::DiskBasedDiagnosticsFinished {
1305 language_server_id: LanguageServerId(1)
1306 }
1307 );
1308 project.update(cx, |project, _| {
1309 assert_eq!(
1310 project
1311 .language_servers_running_disk_based_diagnostics()
1312 .collect::<Vec<_>>(),
1313 [LanguageServerId(0); 0]
1314 );
1315 });
1316}
1317
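// Diagnostics published by a language server should be cleared when that server is
// restarted.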
1318#[gpui::test]
1319async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1320 init_test(cx);
1321
1322 let fs = FakeFs::new(cx.executor());
1323 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1324
1325 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1326
1327 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1328 language_registry.add(rust_lang());
1329 let mut fake_servers =
1330 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1331
1332 let buffer = project
1333 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1334 .await
1335 .unwrap();
1336
1337 // Publish diagnostics
1338 let fake_server = fake_servers.next().await.unwrap();
1339 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1340 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1341 version: None,
1342 diagnostics: vec![lsp::Diagnostic {
1343 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1344 severity: Some(lsp::DiagnosticSeverity::ERROR),
1345 message: "the message".to_string(),
1346 ..Default::default()
1347 }],
1348 });
1349
1350 cx.executor().run_until_parked();
1351 buffer.update(cx, |buffer, _| {
1352 assert_eq!(
1353 buffer
1354 .snapshot()
1355 .diagnostics_in_range::<_, usize>(0..1, false)
1356 .map(|entry| entry.diagnostic.message.clone())
1357 .collect::<Vec<_>>(),
1358 ["the message".to_string()]
1359 );
1360 });
1361 project.update(cx, |project, cx| {
1362 assert_eq!(
1363 project.diagnostic_summary(false, cx),
1364 DiagnosticSummary {
1365 error_count: 1,
1366 warning_count: 0,
1367 }
1368 );
1369 });
1370
1371 project.update(cx, |project, cx| {
1372 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1373 });
1374
1375 // The diagnostics are cleared.
1376 cx.executor().run_until_parked();
1377 buffer.update(cx, |buffer, _| {
1378 assert_eq!(
1379 buffer
1380 .snapshot()
1381 .diagnostics_in_range::<_, usize>(0..1, false)
1382 .map(|entry| entry.diagnostic.message.clone())
1383 .collect::<Vec<_>>(),
1384 Vec::<String>::new(),
1385 );
1386 });
1387 project.update(cx, |project, cx| {
1388 assert_eq!(
1389 project.diagnostic_summary(false, cx),
1390 DiagnosticSummary {
1391 error_count: 0,
1392 warning_count: 0,
1393 }
1394 );
1395 });
1396}
1397
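// After a server reports diagnostics with an unknown buffer version, restarting the
// server should reopen the buffer at version 0.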
1398#[gpui::test]
1399async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1400 init_test(cx);
1401
1402 let fs = FakeFs::new(cx.executor());
1403 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1404
1405 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1406 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1407
1408 language_registry.add(rust_lang());
1409 let mut fake_servers =
1410 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1411
1412 let buffer = project
1413 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1414 .await
1415 .unwrap();
1416
1417 // Before restarting the server, report diagnostics with an unknown buffer version.
1418 let fake_server = fake_servers.next().await.unwrap();
1419 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1420 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1421 version: Some(10000),
1422 diagnostics: Vec::new(),
1423 });
1424 cx.executor().run_until_parked();
1425
1426 project.update(cx, |project, cx| {
1427 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1428 });
1429 let mut fake_server = fake_servers.next().await.unwrap();
1430 let notification = fake_server
1431 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1432 .await
1433 .text_document;
1434 assert_eq!(notification.version, 0);
1435}
1436
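// Toggling enable_language_server per language should stop and start only the
// affected servers.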
1437#[gpui::test]
1438async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1439 init_test(cx);
1440
1441 let fs = FakeFs::new(cx.executor());
1442 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1443 .await;
1444
1445 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1446 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1447
1448 let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
1449 "Rust",
1450 FakeLspAdapter {
1451 name: "rust-lsp",
1452 ..Default::default()
1453 },
1454 );
1455 let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
1456 "JavaScript",
1457 FakeLspAdapter {
1458 name: "js-lsp",
1459 ..Default::default()
1460 },
1461 );
1462 language_registry.add(rust_lang());
1463 language_registry.add(js_lang());
1464
1465 let _rs_buffer = project
1466 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1467 .await
1468 .unwrap();
1469 let _js_buffer = project
1470 .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
1471 .await
1472 .unwrap();
1473
1474 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1475 assert_eq!(
1476 fake_rust_server_1
1477 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1478 .await
1479 .text_document
1480 .uri
1481 .as_str(),
1482 "file:///dir/a.rs"
1483 );
1484
1485 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1486 assert_eq!(
1487 fake_js_server
1488 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1489 .await
1490 .text_document
1491 .uri
1492 .as_str(),
1493 "file:///dir/b.js"
1494 );
1495
1496 // Disable Rust language server, ensuring only that server gets stopped.
1497 cx.update(|cx| {
1498 cx.update_global(|settings: &mut SettingsStore, cx| {
1499 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1500 settings.languages.insert(
1501 Arc::from("Rust"),
1502 LanguageSettingsContent {
1503 enable_language_server: Some(false),
1504 ..Default::default()
1505 },
1506 );
1507 });
1508 })
1509 });
1510 fake_rust_server_1
1511 .receive_notification::<lsp::notification::Exit>()
1512 .await;
1513
1514 // Enable Rust and disable JavaScript language servers, ensuring that the
1515 // former gets started again and that the latter stops.
1516 cx.update(|cx| {
1517 cx.update_global(|settings: &mut SettingsStore, cx| {
1518 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1519 settings.languages.insert(
1520 Arc::from("Rust"),
1521 LanguageSettingsContent {
1522 enable_language_server: Some(true),
1523 ..Default::default()
1524 },
1525 );
1526 settings.languages.insert(
1527 Arc::from("JavaScript"),
1528 LanguageSettingsContent {
1529 enable_language_server: Some(false),
1530 ..Default::default()
1531 },
1532 );
1533 });
1534 })
1535 });
1536 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1537 assert_eq!(
1538 fake_rust_server_2
1539 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1540 .await
1541 .text_document
1542 .uri
1543 .as_str(),
1544 "file:///dir/a.rs"
1545 );
1546 fake_js_server
1547 .receive_notification::<lsp::notification::Exit>()
1548 .await;
1549}
1550
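// Published diagnostics should be translated through subsequent buffer edits,
// including overlapping ranges and reports that arrive for older document versions.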
1551#[gpui::test(iterations = 3)]
1552async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
1553 init_test(cx);
1554
1555 let text = "
1556 fn a() { A }
1557 fn b() { BB }
1558 fn c() { CCC }
1559 "
1560 .unindent();
1561
1562 let fs = FakeFs::new(cx.executor());
1563 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1564
1565 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1566 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1567
1568 language_registry.add(rust_lang());
1569 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1570 "Rust",
1571 FakeLspAdapter {
1572 disk_based_diagnostics_sources: vec!["disk".into()],
1573 ..Default::default()
1574 },
1575 );
1576
1577 let buffer = project
1578 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1579 .await
1580 .unwrap();
1581
1582 let mut fake_server = fake_servers.next().await.unwrap();
1583 let open_notification = fake_server
1584 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1585 .await;
1586
1587 // Edit the buffer, moving the content down
1588 buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
1589 let change_notification_1 = fake_server
1590 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1591 .await;
1592 assert!(change_notification_1.text_document.version > open_notification.text_document.version);
1593
1594 // Report some diagnostics for the initial version of the buffer
1595 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1596 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1597 version: Some(open_notification.text_document.version),
1598 diagnostics: vec![
1599 lsp::Diagnostic {
1600 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1601 severity: Some(DiagnosticSeverity::ERROR),
1602 message: "undefined variable 'A'".to_string(),
1603 source: Some("disk".to_string()),
1604 ..Default::default()
1605 },
1606 lsp::Diagnostic {
1607 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1608 severity: Some(DiagnosticSeverity::ERROR),
1609 message: "undefined variable 'BB'".to_string(),
1610 source: Some("disk".to_string()),
1611 ..Default::default()
1612 },
1613 lsp::Diagnostic {
1614 range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
1615 severity: Some(DiagnosticSeverity::ERROR),
1616 source: Some("disk".to_string()),
1617 message: "undefined variable 'CCC'".to_string(),
1618 ..Default::default()
1619 },
1620 ],
1621 });
1622
1623 // The diagnostics have moved down since they were created.
1624 cx.executor().run_until_parked();
1625 buffer.update(cx, |buffer, _| {
1626 assert_eq!(
1627 buffer
1628 .snapshot()
1629 .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
1630 .collect::<Vec<_>>(),
1631 &[
1632 DiagnosticEntry {
1633 range: Point::new(3, 9)..Point::new(3, 11),
1634 diagnostic: Diagnostic {
1635 source: Some("disk".into()),
1636 severity: DiagnosticSeverity::ERROR,
1637 message: "undefined variable 'BB'".to_string(),
1638 is_disk_based: true,
1639 group_id: 1,
1640 is_primary: true,
1641 ..Default::default()
1642 },
1643 },
1644 DiagnosticEntry {
1645 range: Point::new(4, 9)..Point::new(4, 12),
1646 diagnostic: Diagnostic {
1647 source: Some("disk".into()),
1648 severity: DiagnosticSeverity::ERROR,
1649 message: "undefined variable 'CCC'".to_string(),
1650 is_disk_based: true,
1651 group_id: 2,
1652 is_primary: true,
1653 ..Default::default()
1654 }
1655 }
1656 ]
1657 );
1658 assert_eq!(
1659 chunks_with_diagnostics(buffer, 0..buffer.len()),
1660 [
1661 ("\n\nfn a() { ".to_string(), None),
1662 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1663 (" }\nfn b() { ".to_string(), None),
1664 ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
1665 (" }\nfn c() { ".to_string(), None),
1666 ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
1667 (" }\n".to_string(), None),
1668 ]
1669 );
1670 assert_eq!(
1671 chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
1672 [
1673 ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
1674 (" }\nfn c() { ".to_string(), None),
1675 ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
1676 ]
1677 );
1678 });
1679
1680 // Ensure overlapping diagnostics are highlighted correctly.
1681 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1682 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1683 version: Some(open_notification.text_document.version),
1684 diagnostics: vec![
1685 lsp::Diagnostic {
1686 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1687 severity: Some(DiagnosticSeverity::ERROR),
1688 message: "undefined variable 'A'".to_string(),
1689 source: Some("disk".to_string()),
1690 ..Default::default()
1691 },
1692 lsp::Diagnostic {
1693 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
1694 severity: Some(DiagnosticSeverity::WARNING),
1695 message: "unreachable statement".to_string(),
1696 source: Some("disk".to_string()),
1697 ..Default::default()
1698 },
1699 ],
1700 });
1701
1702 cx.executor().run_until_parked();
1703 buffer.update(cx, |buffer, _| {
1704 assert_eq!(
1705 buffer
1706 .snapshot()
1707 .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
1708 .collect::<Vec<_>>(),
1709 &[
1710 DiagnosticEntry {
1711 range: Point::new(2, 9)..Point::new(2, 12),
1712 diagnostic: Diagnostic {
1713 source: Some("disk".into()),
1714 severity: DiagnosticSeverity::WARNING,
1715 message: "unreachable statement".to_string(),
1716 is_disk_based: true,
1717 group_id: 4,
1718 is_primary: true,
1719 ..Default::default()
1720 }
1721 },
1722 DiagnosticEntry {
1723 range: Point::new(2, 9)..Point::new(2, 10),
1724 diagnostic: Diagnostic {
1725 source: Some("disk".into()),
1726 severity: DiagnosticSeverity::ERROR,
1727 message: "undefined variable 'A'".to_string(),
1728 is_disk_based: true,
1729 group_id: 3,
1730 is_primary: true,
1731 ..Default::default()
1732 },
1733 }
1734 ]
1735 );
1736 assert_eq!(
1737 chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
1738 [
1739 ("fn a() { ".to_string(), None),
1740 ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
1741 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1742 ("\n".to_string(), None),
1743 ]
1744 );
1745 assert_eq!(
1746 chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
1747 [
1748 (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
1749 ("\n".to_string(), None),
1750 ]
1751 );
1752 });
1753
1754 // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
1755 // changes since the last save.
1756 buffer.update(cx, |buffer, cx| {
1757 buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
1758 buffer.edit(
1759 [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
1760 None,
1761 cx,
1762 );
1763 buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
1764 });
1765 let change_notification_2 = fake_server
1766 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1767 .await;
1768 assert!(
1769 change_notification_2.text_document.version > change_notification_1.text_document.version
1770 );
1771
1772 // Handle out-of-order diagnostics
1773 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1774 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1775 version: Some(change_notification_2.text_document.version),
1776 diagnostics: vec![
1777 lsp::Diagnostic {
1778 range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
1779 severity: Some(DiagnosticSeverity::ERROR),
1780 message: "undefined variable 'BB'".to_string(),
1781 source: Some("disk".to_string()),
1782 ..Default::default()
1783 },
1784 lsp::Diagnostic {
1785 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1786 severity: Some(DiagnosticSeverity::WARNING),
1787 message: "undefined variable 'A'".to_string(),
1788 source: Some("disk".to_string()),
1789 ..Default::default()
1790 },
1791 ],
1792 });
1793
1794 cx.executor().run_until_parked();
1795 buffer.update(cx, |buffer, _| {
1796 assert_eq!(
1797 buffer
1798 .snapshot()
1799 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1800 .collect::<Vec<_>>(),
1801 &[
1802 DiagnosticEntry {
1803 range: Point::new(2, 21)..Point::new(2, 22),
1804 diagnostic: Diagnostic {
1805 source: Some("disk".into()),
1806 severity: DiagnosticSeverity::WARNING,
1807 message: "undefined variable 'A'".to_string(),
1808 is_disk_based: true,
1809 group_id: 6,
1810 is_primary: true,
1811 ..Default::default()
1812 }
1813 },
1814 DiagnosticEntry {
1815 range: Point::new(3, 9)..Point::new(3, 14),
1816 diagnostic: Diagnostic {
1817 source: Some("disk".into()),
1818 severity: DiagnosticSeverity::ERROR,
1819 message: "undefined variable 'BB'".to_string(),
1820 is_disk_based: true,
1821 group_id: 5,
1822 is_primary: true,
1823 ..Default::default()
1824 },
1825 }
1826 ]
1827 );
1828 });
1829}
1830
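// Empty diagnostic ranges should be expanded to cover an adjacent character so they
// remain visible in the buffer.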
1831#[gpui::test]
1832async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1833 init_test(cx);
1834
1835 let text = concat!(
1836 "let one = ;\n", //
1837 "let two = \n",
1838 "let three = 3;\n",
1839 );
1840
1841 let fs = FakeFs::new(cx.executor());
1842 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1843
1844 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1845 let buffer = project
1846 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1847 .await
1848 .unwrap();
1849
1850 project.update(cx, |project, cx| {
1851 project
1852 .update_buffer_diagnostics(
1853 &buffer,
1854 LanguageServerId(0),
1855 None,
1856 vec![
1857 DiagnosticEntry {
1858 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1859 diagnostic: Diagnostic {
1860 severity: DiagnosticSeverity::ERROR,
1861 message: "syntax error 1".to_string(),
1862 ..Default::default()
1863 },
1864 },
1865 DiagnosticEntry {
1866 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1867 diagnostic: Diagnostic {
1868 severity: DiagnosticSeverity::ERROR,
1869 message: "syntax error 2".to_string(),
1870 ..Default::default()
1871 },
1872 },
1873 ],
1874 cx,
1875 )
1876 .unwrap();
1877 });
1878
1879 // An empty range is extended forward to include the following character.
1880 // At the end of a line, an empty range is extended backward to include
1881 // the preceding character.
1882 buffer.update(cx, |buffer, _| {
1883 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1884 assert_eq!(
1885 chunks
1886 .iter()
1887 .map(|(s, d)| (s.as_str(), *d))
1888 .collect::<Vec<_>>(),
1889 &[
1890 ("let one = ", None),
1891 (";", Some(DiagnosticSeverity::ERROR)),
1892 ("\nlet two =", None),
1893 (" ", Some(DiagnosticSeverity::ERROR)),
1894 ("\nlet three = 3;\n", None)
1895 ]
1896 );
1897 });
1898}
1899
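// Diagnostics reported by different language servers for the same path should both
// be counted in the project's diagnostic summary.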
1900#[gpui::test]
1901async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1902 init_test(cx);
1903
1904 let fs = FakeFs::new(cx.executor());
1905 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1906 .await;
1907
1908 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1909
1910 project.update(cx, |project, cx| {
1911 project
1912 .update_diagnostic_entries(
1913 LanguageServerId(0),
1914 Path::new("/dir/a.rs").to_owned(),
1915 None,
1916 vec![DiagnosticEntry {
1917 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1918 diagnostic: Diagnostic {
1919 severity: DiagnosticSeverity::ERROR,
1920 is_primary: true,
1921 message: "syntax error a1".to_string(),
1922 ..Default::default()
1923 },
1924 }],
1925 cx,
1926 )
1927 .unwrap();
1928 project
1929 .update_diagnostic_entries(
1930 LanguageServerId(1),
1931 Path::new("/dir/a.rs").to_owned(),
1932 None,
1933 vec![DiagnosticEntry {
1934 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1935 diagnostic: Diagnostic {
1936 severity: DiagnosticSeverity::ERROR,
1937 is_primary: true,
1938 message: "syntax error b1".to_string(),
1939 ..Default::default()
1940 },
1941 }],
1942 cx,
1943 )
1944 .unwrap();
1945
1946 assert_eq!(
1947 project.diagnostic_summary(false, cx),
1948 DiagnosticSummary {
1949 error_count: 2,
1950 warning_count: 0,
1951 }
1952 );
1953 });
1954}
1955
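// Edits computed by the server against an older document version should be
// transformed onto the buffer's current contents before being applied.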
1956#[gpui::test]
1957async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
1958 init_test(cx);
1959
1960 let text = "
1961 fn a() {
1962 f1();
1963 }
1964 fn b() {
1965 f2();
1966 }
1967 fn c() {
1968 f3();
1969 }
1970 "
1971 .unindent();
1972
1973 let fs = FakeFs::new(cx.executor());
1974 fs.insert_tree(
1975 "/dir",
1976 json!({
1977 "a.rs": text.clone(),
1978 }),
1979 )
1980 .await;
1981
1982 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1983
1984 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1985 language_registry.add(rust_lang());
1986 let mut fake_servers =
1987 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1988
1989 let buffer = project
1990 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1991 .await
1992 .unwrap();
1993
1994 let mut fake_server = fake_servers.next().await.unwrap();
1995 let lsp_document_version = fake_server
1996 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1997 .await
1998 .text_document
1999 .version;
2000
2001 // Simulate editing the buffer after the language server computes some edits.
2002 buffer.update(cx, |buffer, cx| {
2003 buffer.edit(
2004 [(
2005 Point::new(0, 0)..Point::new(0, 0),
2006 "// above first function\n",
2007 )],
2008 None,
2009 cx,
2010 );
2011 buffer.edit(
2012 [(
2013 Point::new(2, 0)..Point::new(2, 0),
2014 " // inside first function\n",
2015 )],
2016 None,
2017 cx,
2018 );
2019 buffer.edit(
2020 [(
2021 Point::new(6, 4)..Point::new(6, 4),
2022 "// inside second function ",
2023 )],
2024 None,
2025 cx,
2026 );
2027
2028 assert_eq!(
2029 buffer.text(),
2030 "
2031 // above first function
2032 fn a() {
2033 // inside first function
2034 f1();
2035 }
2036 fn b() {
2037 // inside second function f2();
2038 }
2039 fn c() {
2040 f3();
2041 }
2042 "
2043 .unindent()
2044 );
2045 });
2046
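    // Request edits that were computed against the older document version;
    // they are adjusted to account for the buffer edits made above.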
2047 let edits = project
2048 .update(cx, |project, cx| {
2049 project.edits_from_lsp(
2050 &buffer,
2051 vec![
2052 // replace body of first function
2053 lsp::TextEdit {
2054 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
2055 new_text: "
2056 fn a() {
2057 f10();
2058 }
2059 "
2060 .unindent(),
2061 },
2062 // edit inside second function
2063 lsp::TextEdit {
2064 range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
2065 new_text: "00".into(),
2066 },
2067 // edit inside third function via two distinct edits
2068 lsp::TextEdit {
2069 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
2070 new_text: "4000".into(),
2071 },
2072 lsp::TextEdit {
2073 range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
2074 new_text: "".into(),
2075 },
2076 ],
2077 LanguageServerId(0),
2078 Some(lsp_document_version),
2079 cx,
2080 )
2081 })
2082 .await
2083 .unwrap();
2084
2085 buffer.update(cx, |buffer, cx| {
2086 for (range, new_text) in edits {
2087 buffer.edit([(range, new_text)], None, cx);
2088 }
2089 assert_eq!(
2090 buffer.text(),
2091 "
2092 // above first function
2093 fn a() {
2094 // inside first function
2095 f10();
2096 }
2097 fn b() {
2098 // inside second function f200();
2099 }
2100 fn c() {
2101 f4000();
2102 }
2103 "
2104 .unindent()
2105 );
2106 });
2107}
2108
2109#[gpui::test]
2110async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
2111 init_test(cx);
2112
2113 let text = "
2114 use a::b;
2115 use a::c;
2116
2117 fn f() {
2118 b();
2119 c();
2120 }
2121 "
2122 .unindent();
2123
2124 let fs = FakeFs::new(cx.executor());
2125 fs.insert_tree(
2126 "/dir",
2127 json!({
2128 "a.rs": text.clone(),
2129 }),
2130 )
2131 .await;
2132
2133 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2134 let buffer = project
2135 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2136 .await
2137 .unwrap();
2138
2139 // Simulate the language server sending us a small edit in the form of a very large diff.
2140 // Rust-analyzer does this when performing a merge-imports code action.
2141 let edits = project
2142 .update(cx, |project, cx| {
2143 project.edits_from_lsp(
2144 &buffer,
2145 [
2146 // Replace the first use statement without editing the semicolon.
2147 lsp::TextEdit {
2148 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
2149 new_text: "a::{b, c}".into(),
2150 },
2151 // Reinsert the remainder of the file between the semicolon and the final
2152 // newline of the file.
2153 lsp::TextEdit {
2154 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2155 new_text: "\n\n".into(),
2156 },
2157 lsp::TextEdit {
2158 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2159 new_text: "
2160 fn f() {
2161 b();
2162 c();
2163 }"
2164 .unindent(),
2165 },
2166 // Delete everything after the first newline of the file.
2167 lsp::TextEdit {
2168 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
2169 new_text: "".into(),
2170 },
2171 ],
2172 LanguageServerId(0),
2173 None,
2174 cx,
2175 )
2176 })
2177 .await
2178 .unwrap();
2179
2180 buffer.update(cx, |buffer, cx| {
2181 let edits = edits
2182 .into_iter()
2183 .map(|(range, text)| {
2184 (
2185 range.start.to_point(buffer)..range.end.to_point(buffer),
2186 text,
2187 )
2188 })
2189 .collect::<Vec<_>>();
2190
2191 assert_eq!(
2192 edits,
2193 [
2194 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2195 (Point::new(1, 0)..Point::new(2, 0), "".into())
2196 ]
2197 );
2198
2199 for (range, new_text) in edits {
2200 buffer.edit([(range, new_text)], None, cx);
2201 }
2202 assert_eq!(
2203 buffer.text(),
2204 "
2205 use a::{b, c};
2206
2207 fn f() {
2208 b();
2209 c();
2210 }
2211 "
2212 .unindent()
2213 );
2214 });
2215}
2216
2217#[gpui::test]
2218async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
2219 init_test(cx);
2220
2221 let text = "
2222 use a::b;
2223 use a::c;
2224
2225 fn f() {
2226 b();
2227 c();
2228 }
2229 "
2230 .unindent();
2231
2232 let fs = FakeFs::new(cx.executor());
2233 fs.insert_tree(
2234 "/dir",
2235 json!({
2236 "a.rs": text.clone(),
2237 }),
2238 )
2239 .await;
2240
2241 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2242 let buffer = project
2243 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2244 .await
2245 .unwrap();
2246
    // Simulate the language server sending us edits in no particular order,
    // with ranges sometimes inverted or pointing to invalid locations.
2249 let edits = project
2250 .update(cx, |project, cx| {
2251 project.edits_from_lsp(
2252 &buffer,
2253 [
2254 lsp::TextEdit {
2255 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2256 new_text: "\n\n".into(),
2257 },
2258 lsp::TextEdit {
2259 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
2260 new_text: "a::{b, c}".into(),
2261 },
2262 lsp::TextEdit {
2263 range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
2264 new_text: "".into(),
2265 },
2266 lsp::TextEdit {
2267 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
2268 new_text: "
2269 fn f() {
2270 b();
2271 c();
2272 }"
2273 .unindent(),
2274 },
2275 ],
2276 LanguageServerId(0),
2277 None,
2278 cx,
2279 )
2280 })
2281 .await
2282 .unwrap();
2283
2284 buffer.update(cx, |buffer, cx| {
2285 let edits = edits
2286 .into_iter()
2287 .map(|(range, text)| {
2288 (
2289 range.start.to_point(buffer)..range.end.to_point(buffer),
2290 text,
2291 )
2292 })
2293 .collect::<Vec<_>>();
2294
2295 assert_eq!(
2296 edits,
2297 [
2298 (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
2299 (Point::new(1, 0)..Point::new(2, 0), "".into())
2300 ]
2301 );
2302
2303 for (range, new_text) in edits {
2304 buffer.edit([(range, new_text)], None, cx);
2305 }
2306 assert_eq!(
2307 buffer.text(),
2308 "
2309 use a::{b, c};
2310
2311 fn f() {
2312 b();
2313 c();
2314 }
2315 "
2316 .unindent()
2317 );
2318 });
2319}
2320
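/// Collect the buffer's text into runs of consecutive chunks that share the
/// same diagnostic severity.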
2321fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2322 buffer: &Buffer,
2323 range: Range<T>,
2324) -> Vec<(String, Option<DiagnosticSeverity>)> {
2325 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2326 for chunk in buffer.snapshot().chunks(range, true) {
2327 if chunks.last().map_or(false, |prev_chunk| {
2328 prev_chunk.1 == chunk.diagnostic_severity
2329 }) {
2330 chunks.last_mut().unwrap().0.push_str(chunk.text);
2331 } else {
2332 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2333 }
2334 }
2335 chunks
2336}
2337
2338#[gpui::test(iterations = 10)]
2339async fn test_definition(cx: &mut gpui::TestAppContext) {
2340 init_test(cx);
2341
2342 let fs = FakeFs::new(cx.executor());
2343 fs.insert_tree(
2344 "/dir",
2345 json!({
2346 "a.rs": "const fn a() { A }",
2347 "b.rs": "const y: i32 = crate::a()",
2348 }),
2349 )
2350 .await;
2351
2352 let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;
2353
2354 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2355 language_registry.add(rust_lang());
2356 let mut fake_servers =
2357 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
2358
2359 let buffer = project
2360 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
2361 .await
2362 .unwrap();
2363
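    // The fake language server resolves the symbol to a location in a file
    // that is not yet part of the project.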
2364 let fake_server = fake_servers.next().await.unwrap();
2365 fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
2366 let params = params.text_document_position_params;
2367 assert_eq!(
2368 params.text_document.uri.to_file_path().unwrap(),
2369 Path::new("/dir/b.rs"),
2370 );
2371 assert_eq!(params.position, lsp::Position::new(0, 22));
2372
2373 Ok(Some(lsp::GotoDefinitionResponse::Scalar(
2374 lsp::Location::new(
2375 lsp::Url::from_file_path("/dir/a.rs").unwrap(),
2376 lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
2377 ),
2378 )))
2379 });
2380
2381 let mut definitions = project
2382 .update(cx, |project, cx| project.definition(&buffer, 22, cx))
2383 .await
2384 .unwrap();
2385
    // Assert that no new language server was started.
2387 cx.executor().run_until_parked();
2388 assert!(fake_servers.try_next().is_err());
2389
2390 assert_eq!(definitions.len(), 1);
2391 let definition = definitions.pop().unwrap();
2392 cx.update(|cx| {
2393 let target_buffer = definition.target.buffer.read(cx);
2394 assert_eq!(
2395 target_buffer
2396 .file()
2397 .unwrap()
2398 .as_local()
2399 .unwrap()
2400 .abs_path(cx),
2401 Path::new("/dir/a.rs"),
2402 );
2403 assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
2404 assert_eq!(
2405 list_worktrees(&project, cx),
2406 [("/dir/b.rs".as_ref(), true), ("/dir/a.rs".as_ref(), false)]
2407 );
2408
2409 drop(definition);
2410 });
2411 cx.update(|cx| {
2412 assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
2413 });
2414
2415 fn list_worktrees<'a>(
2416 project: &'a Model<Project>,
2417 cx: &'a AppContext,
2418 ) -> Vec<(&'a Path, bool)> {
2419 project
2420 .read(cx)
2421 .worktrees()
2422 .map(|worktree| {
2423 let worktree = worktree.read(cx);
2424 (
2425 worktree.as_local().unwrap().abs_path().as_ref(),
2426 worktree.is_visible(),
2427 )
2428 })
2429 .collect::<Vec<_>>()
2430 }
2431}
2432
2433#[gpui::test]
2434async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
2435 init_test(cx);
2436
2437 let fs = FakeFs::new(cx.executor());
2438 fs.insert_tree(
2439 "/dir",
2440 json!({
2441 "a.ts": "",
2442 }),
2443 )
2444 .await;
2445
2446 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2447
2448 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2449 language_registry.add(typescript_lang());
2450 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2451 "TypeScript",
2452 FakeLspAdapter {
2453 capabilities: lsp::ServerCapabilities {
2454 completion_provider: Some(lsp::CompletionOptions {
2455 trigger_characters: Some(vec![":".to_string()]),
2456 ..Default::default()
2457 }),
2458 ..Default::default()
2459 },
2460 ..Default::default()
2461 },
2462 );
2463
2464 let buffer = project
2465 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2466 .await
2467 .unwrap();
2468
2469 let fake_server = fake_language_servers.next().await.unwrap();
2470
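    // When completion items carry no explicit edit range, the range is
    // inferred from the word fragment preceding the cursor ("fqn").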
2471 let text = "let a = b.fqn";
2472 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2473 let completions = project.update(cx, |project, cx| {
2474 project.completions(&buffer, text.len(), cx)
2475 });
2476
2477 fake_server
2478 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2479 Ok(Some(lsp::CompletionResponse::Array(vec![
2480 lsp::CompletionItem {
2481 label: "fullyQualifiedName?".into(),
2482 insert_text: Some("fullyQualifiedName".into()),
2483 ..Default::default()
2484 },
2485 ])))
2486 })
2487 .next()
2488 .await;
2489 let completions = completions.await.unwrap();
2490 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2491 assert_eq!(completions.len(), 1);
2492 assert_eq!(completions[0].new_text, "fullyQualifiedName");
2493 assert_eq!(
2494 completions[0].old_range.to_offset(&snapshot),
2495 text.len() - 3..text.len()
2496 );
2497
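    // The same inference applies inside a string literal: only the partial
    // word before the cursor ("cmp") is replaced.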
2498 let text = "let a = \"atoms/cmp\"";
2499 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2500 let completions = project.update(cx, |project, cx| {
2501 project.completions(&buffer, text.len() - 1, cx)
2502 });
2503
2504 fake_server
2505 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2506 Ok(Some(lsp::CompletionResponse::Array(vec![
2507 lsp::CompletionItem {
2508 label: "component".into(),
2509 ..Default::default()
2510 },
2511 ])))
2512 })
2513 .next()
2514 .await;
2515 let completions = completions.await.unwrap();
2516 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
2517 assert_eq!(completions.len(), 1);
2518 assert_eq!(completions[0].new_text, "component");
2519 assert_eq!(
2520 completions[0].old_range.to_offset(&snapshot),
2521 text.len() - 4..text.len() - 1
2522 );
2523}
2524
2525#[gpui::test]
2526async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
2527 init_test(cx);
2528
2529 let fs = FakeFs::new(cx.executor());
2530 fs.insert_tree(
2531 "/dir",
2532 json!({
2533 "a.ts": "",
2534 }),
2535 )
2536 .await;
2537
2538 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2539
2540 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2541 language_registry.add(typescript_lang());
2542 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2543 "TypeScript",
2544 FakeLspAdapter {
2545 capabilities: lsp::ServerCapabilities {
2546 completion_provider: Some(lsp::CompletionOptions {
2547 trigger_characters: Some(vec![":".to_string()]),
2548 ..Default::default()
2549 }),
2550 ..Default::default()
2551 },
2552 ..Default::default()
2553 },
2554 );
2555
2556 let buffer = project
2557 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2558 .await
2559 .unwrap();
2560
2561 let fake_server = fake_language_servers.next().await.unwrap();
2562
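    // Carriage returns in the completion's insert text are normalized to
    // newlines.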
2563 let text = "let a = b.fqn";
2564 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
2565 let completions = project.update(cx, |project, cx| {
2566 project.completions(&buffer, text.len(), cx)
2567 });
2568
2569 fake_server
2570 .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
2571 Ok(Some(lsp::CompletionResponse::Array(vec![
2572 lsp::CompletionItem {
2573 label: "fullyQualifiedName?".into(),
2574 insert_text: Some("fully\rQualified\r\nName".into()),
2575 ..Default::default()
2576 },
2577 ])))
2578 })
2579 .next()
2580 .await;
2581 let completions = completions.await.unwrap();
2582 assert_eq!(completions.len(), 1);
2583 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
2584}
2585
2586#[gpui::test(iterations = 10)]
2587async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
2588 init_test(cx);
2589
2590 let fs = FakeFs::new(cx.executor());
2591 fs.insert_tree(
2592 "/dir",
2593 json!({
2594 "a.ts": "a",
2595 }),
2596 )
2597 .await;
2598
2599 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2600
2601 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2602 language_registry.add(typescript_lang());
2603 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
2604 "TypeScript",
2605 FakeLspAdapter {
2606 capabilities: lsp::ServerCapabilities {
2607 code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
2608 lsp::CodeActionOptions {
2609 resolve_provider: Some(true),
2610 ..lsp::CodeActionOptions::default()
2611 },
2612 )),
2613 ..lsp::ServerCapabilities::default()
2614 },
2615 ..FakeLspAdapter::default()
2616 },
2617 );
2618
2619 let buffer = project
2620 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
2621 .await
2622 .unwrap();
2623
2624 let fake_server = fake_language_servers.next().await.unwrap();
2625
2626 // Language server returns code actions that contain commands, and not edits.
2627 let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
2628 fake_server
2629 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
2630 Ok(Some(vec![
2631 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2632 title: "The code action".into(),
2633 data: Some(serde_json::json!({
2634 "command": "_the/command",
2635 })),
2636 ..lsp::CodeAction::default()
2637 }),
2638 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
2639 title: "two".into(),
2640 ..lsp::CodeAction::default()
2641 }),
2642 ]))
2643 })
2644 .next()
2645 .await;
2646
2647 let action = actions.await[0].clone();
2648 let apply = project.update(cx, |project, cx| {
2649 project.apply_code_action(buffer.clone(), action, true, cx)
2650 });
2651
    // Resolving the code action does not populate its edits. In the absence of
    // edits, we must execute the given command.
2654 fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
2655 |mut action, _| async move {
2656 if action.data.is_some() {
2657 action.command = Some(lsp::Command {
2658 title: "The command".into(),
2659 command: "_the/command".into(),
2660 arguments: Some(vec![json!("the-argument")]),
2661 });
2662 }
2663 Ok(action)
2664 },
2665 );
2666
    // While executing the command, the language server sends the editor
    // a `workspace/applyEdit` request.
2669 fake_server
2670 .handle_request::<lsp::request::ExecuteCommand, _, _>({
2671 let fake = fake_server.clone();
2672 move |params, _| {
2673 assert_eq!(params.command, "_the/command");
2674 let fake = fake.clone();
2675 async move {
2676 fake.server
2677 .request::<lsp::request::ApplyWorkspaceEdit>(
2678 lsp::ApplyWorkspaceEditParams {
2679 label: None,
2680 edit: lsp::WorkspaceEdit {
2681 changes: Some(
2682 [(
2683 lsp::Url::from_file_path("/dir/a.ts").unwrap(),
2684 vec![lsp::TextEdit {
2685 range: lsp::Range::new(
2686 lsp::Position::new(0, 0),
2687 lsp::Position::new(0, 0),
2688 ),
2689 new_text: "X".into(),
2690 }],
2691 )]
2692 .into_iter()
2693 .collect(),
2694 ),
2695 ..Default::default()
2696 },
2697 },
2698 )
2699 .await
2700 .unwrap();
2701 Ok(Some(json!(null)))
2702 }
2703 }
2704 })
2705 .next()
2706 .await;
2707
    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspace/applyEdit` request.
2710 let transaction = apply.await.unwrap();
2711 assert!(transaction.0.contains_key(&buffer));
2712 buffer.update(cx, |buffer, cx| {
2713 assert_eq!(buffer.text(), "Xa");
2714 buffer.undo(cx);
2715 assert_eq!(buffer.text(), "a");
2716 });
2717}
2718
2719#[gpui::test(iterations = 10)]
2720async fn test_save_file(cx: &mut gpui::TestAppContext) {
2721 init_test(cx);
2722
2723 let fs = FakeFs::new(cx.executor());
2724 fs.insert_tree(
2725 "/dir",
2726 json!({
2727 "file1": "the old contents",
2728 }),
2729 )
2730 .await;
2731
2732 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2733 let buffer = project
2734 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2735 .await
2736 .unwrap();
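    // Make a large edit, then save the buffer and verify that the file on
    // disk matches the buffer's contents.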
2737 buffer.update(cx, |buffer, cx| {
2738 assert_eq!(buffer.text(), "the old contents");
2739 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2740 });
2741
2742 project
2743 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2744 .await
2745 .unwrap();
2746
2747 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2748 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2749}
2750
2751#[gpui::test(iterations = 30)]
2752async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2753 init_test(cx);
2754
2755 let fs = FakeFs::new(cx.executor().clone());
2756 fs.insert_tree(
2757 "/dir",
2758 json!({
2759 "file1": "the original contents",
2760 }),
2761 )
2762 .await;
2763
2764 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2765 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2766 let buffer = project
2767 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2768 .await
2769 .unwrap();
2770
2771 // Simulate buffer diffs being slow, so that they don't complete before
2772 // the next file change occurs.
2773 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2774
2775 // Change the buffer's file on disk, and then wait for the file change
2776 // to be detected by the worktree, so that the buffer starts reloading.
2777 fs.save(
2778 "/dir/file1".as_ref(),
2779 &"the first contents".into(),
2780 Default::default(),
2781 )
2782 .await
2783 .unwrap();
2784 worktree.next_event(cx).await;
2785
2786 // Change the buffer's file again. Depending on the random seed, the
2787 // previous file change may still be in progress.
2788 fs.save(
2789 "/dir/file1".as_ref(),
2790 &"the second contents".into(),
2791 Default::default(),
2792 )
2793 .await
2794 .unwrap();
2795 worktree.next_event(cx).await;
2796
2797 cx.executor().run_until_parked();
2798 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2799 buffer.read_with(cx, |buffer, _| {
2800 assert_eq!(buffer.text(), on_disk_text);
2801 assert!(!buffer.is_dirty(), "buffer should not be dirty");
        assert!(!buffer.has_conflict(), "buffer should not have a conflict");
2803 });
2804}
2805
2806#[gpui::test(iterations = 30)]
2807async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2808 init_test(cx);
2809
2810 let fs = FakeFs::new(cx.executor().clone());
2811 fs.insert_tree(
2812 "/dir",
2813 json!({
2814 "file1": "the original contents",
2815 }),
2816 )
2817 .await;
2818
2819 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2820 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2821 let buffer = project
2822 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2823 .await
2824 .unwrap();
2825
2826 // Simulate buffer diffs being slow, so that they don't complete before
2827 // the next file change occurs.
2828 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2829
2830 // Change the buffer's file on disk, and then wait for the file change
2831 // to be detected by the worktree, so that the buffer starts reloading.
2832 fs.save(
2833 "/dir/file1".as_ref(),
2834 &"the first contents".into(),
2835 Default::default(),
2836 )
2837 .await
2838 .unwrap();
2839 worktree.next_event(cx).await;
2840
2841 cx.executor()
2842 .spawn(cx.executor().simulate_random_delay())
2843 .await;
2844
    // Perform a no-op edit, causing the buffer's version to increase.
2846 buffer.update(cx, |buffer, cx| {
2847 buffer.edit([(0..0, " ")], None, cx);
2848 buffer.undo(cx);
2849 });
2850
2851 cx.executor().run_until_parked();
2852 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2853 buffer.read_with(cx, |buffer, _| {
2854 let buffer_text = buffer.text();
2855 if buffer_text == on_disk_text {
2856 assert!(
2857 !buffer.is_dirty() && !buffer.has_conflict(),
2858 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2859 );
2860 }
2861 // If the file change occurred while the buffer was processing the first
2862 // change, the buffer will be in a conflicting state.
2863 else {
2864 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
            assert!(buffer.has_conflict(), "buffer should report that it has a conflict. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2866 }
2867 });
2868}
2869
2870#[gpui::test]
2871async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2872 init_test(cx);
2873
2874 let fs = FakeFs::new(cx.executor());
2875 fs.insert_tree(
2876 "/dir",
2877 json!({
2878 "file1": "the old contents",
2879 }),
2880 )
2881 .await;
2882
2883 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2884 let buffer = project
2885 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2886 .await
2887 .unwrap();
2888 buffer.update(cx, |buffer, cx| {
2889 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2890 });
2891
2892 project
2893 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2894 .await
2895 .unwrap();
2896
2897 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2898 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2899}
2900
2901#[gpui::test]
2902async fn test_save_as(cx: &mut gpui::TestAppContext) {
2903 init_test(cx);
2904
2905 let fs = FakeFs::new(cx.executor());
2906 fs.insert_tree("/dir", json!({})).await;
2907
2908 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2909
2910 let languages = project.update(cx, |project, _| project.languages().clone());
2911 languages.add(rust_lang());
2912
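    // Create an untitled buffer; until it is saved with a path, it is treated
    // as Plain Text.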
2913 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
2914 buffer.update(cx, |buffer, cx| {
2915 buffer.edit([(0..0, "abc")], None, cx);
2916 assert!(buffer.is_dirty());
2917 assert!(!buffer.has_conflict());
2918 assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
2919 });
2920 project
2921 .update(cx, |project, cx| {
2922 let worktree_id = project.worktrees().next().unwrap().read(cx).id();
2923 let path = ProjectPath {
2924 worktree_id,
2925 path: Arc::from(Path::new("file1.rs")),
2926 };
2927 project.save_buffer_as(buffer.clone(), path, cx)
2928 })
2929 .await
2930 .unwrap();
2931 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
2932
2933 cx.executor().run_until_parked();
2934 buffer.update(cx, |buffer, cx| {
2935 assert_eq!(
2936 buffer.file().unwrap().full_path(cx),
2937 Path::new("dir/file1.rs")
2938 );
2939 assert!(!buffer.is_dirty());
2940 assert!(!buffer.has_conflict());
2941 assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
2942 });
2943
2944 let opened_buffer = project
2945 .update(cx, |project, cx| {
2946 project.open_local_buffer("/dir/file1.rs", cx)
2947 })
2948 .await
2949 .unwrap();
2950 assert_eq!(opened_buffer, buffer);
2951}
2952
2953#[gpui::test(retries = 5)]
2954async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
2955 init_test(cx);
2956 cx.executor().allow_parking();
2957
2958 let dir = temp_tree(json!({
2959 "a": {
2960 "file1": "",
2961 "file2": "",
2962 "file3": "",
2963 },
2964 "b": {
2965 "c": {
2966 "file4": "",
2967 "file5": "",
2968 }
2969 }
2970 }));
2971
2972 let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;
2973 let rpc = project.update(cx, |p, _| p.client.clone());
2974
2975 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2976 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
2977 async move { buffer.await.unwrap() }
2978 };
2979 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
2980 project.update(cx, |project, cx| {
2981 let tree = project.worktrees().next().unwrap();
2982 tree.read(cx)
2983 .entry_for_path(path)
2984 .unwrap_or_else(|| panic!("no entry for path {}", path))
2985 .id
2986 })
2987 };
2988
2989 let buffer2 = buffer_for_path("a/file2", cx).await;
2990 let buffer3 = buffer_for_path("a/file3", cx).await;
2991 let buffer4 = buffer_for_path("b/c/file4", cx).await;
2992 let buffer5 = buffer_for_path("b/c/file5", cx).await;
2993
2994 let file2_id = id_for_path("a/file2", cx);
2995 let file3_id = id_for_path("a/file3", cx);
2996 let file4_id = id_for_path("b/c/file4", cx);
2997
2998 // Create a remote copy of this worktree.
2999 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3000
3001 let metadata = tree.update(cx, |tree, _| tree.as_local().unwrap().metadata_proto());
3002
3003 let updates = Arc::new(Mutex::new(Vec::new()));
3004 tree.update(cx, |tree, cx| {
3005 let _ = tree.as_local_mut().unwrap().observe_updates(0, cx, {
3006 let updates = updates.clone();
3007 move |update| {
3008 updates.lock().push(update);
3009 async { true }
3010 }
3011 });
3012 });
3013
3014 let remote = cx.update(|cx| Worktree::remote(1, 1, metadata, rpc.clone(), cx));
3015
3016 cx.executor().run_until_parked();
3017
3018 cx.update(|cx| {
3019 assert!(!buffer2.read(cx).is_dirty());
3020 assert!(!buffer3.read(cx).is_dirty());
3021 assert!(!buffer4.read(cx).is_dirty());
3022 assert!(!buffer5.read(cx).is_dirty());
3023 });
3024
3025 // Rename and delete files and directories.
3026 tree.flush_fs_events(cx).await;
3027 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
3028 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
3029 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
3030 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
3031 tree.flush_fs_events(cx).await;
3032
3033 let expected_paths = vec![
3034 "a",
3035 "a/file1",
3036 "a/file2.new",
3037 "b",
3038 "d",
3039 "d/file3",
3040 "d/file4",
3041 ];
3042
3043 cx.update(|app| {
3044 assert_eq!(
3045 tree.read(app)
3046 .paths()
3047 .map(|p| p.to_str().unwrap())
3048 .collect::<Vec<_>>(),
3049 expected_paths
3050 );
3051 });
3052
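    // Entry ids are preserved across renames and moves.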
3053 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
3054 assert_eq!(id_for_path("d/file3", cx), file3_id);
3055 assert_eq!(id_for_path("d/file4", cx), file4_id);
3056
3057 cx.update(|cx| {
3058 assert_eq!(
3059 buffer2.read(cx).file().unwrap().path().as_ref(),
3060 Path::new("a/file2.new")
3061 );
3062 assert_eq!(
3063 buffer3.read(cx).file().unwrap().path().as_ref(),
3064 Path::new("d/file3")
3065 );
3066 assert_eq!(
3067 buffer4.read(cx).file().unwrap().path().as_ref(),
3068 Path::new("d/file4")
3069 );
3070 assert_eq!(
3071 buffer5.read(cx).file().unwrap().path().as_ref(),
3072 Path::new("b/c/file5")
3073 );
3074
3075 assert!(!buffer2.read(cx).file().unwrap().is_deleted());
3076 assert!(!buffer3.read(cx).file().unwrap().is_deleted());
3077 assert!(!buffer4.read(cx).file().unwrap().is_deleted());
3078 assert!(buffer5.read(cx).file().unwrap().is_deleted());
3079 });
3080
3081 // Update the remote worktree. Check that it becomes consistent with the
3082 // local worktree.
3083 cx.executor().run_until_parked();
3084
3085 remote.update(cx, |remote, _| {
3086 for update in updates.lock().drain(..) {
3087 remote.as_remote_mut().unwrap().update_from_remote(update);
3088 }
3089 });
3090 cx.executor().run_until_parked();
3091 remote.update(cx, |remote, _| {
3092 assert_eq!(
3093 remote
3094 .paths()
3095 .map(|p| p.to_str().unwrap())
3096 .collect::<Vec<_>>(),
3097 expected_paths
3098 );
3099 });
3100}
3101
3102#[gpui::test(iterations = 10)]
3103async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3104 init_test(cx);
3105
3106 let fs = FakeFs::new(cx.executor());
3107 fs.insert_tree(
3108 "/dir",
3109 json!({
3110 "a": {
3111 "file1": "",
3112 }
3113 }),
3114 )
3115 .await;
3116
3117 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3118 let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
3119 let tree_id = tree.update(cx, |tree, _| tree.id());
3120
3121 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3122 project.update(cx, |project, cx| {
3123 let tree = project.worktrees().next().unwrap();
3124 tree.read(cx)
3125 .entry_for_path(path)
3126 .unwrap_or_else(|| panic!("no entry for path {}", path))
3127 .id
3128 })
3129 };
3130
3131 let dir_id = id_for_path("a", cx);
3132 let file_id = id_for_path("a/file1", cx);
3133 let buffer = project
3134 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3135 .await
3136 .unwrap();
3137 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3138
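    // Rename the parent directory. The directory and file entry ids, as well
    // as the open buffer, should remain stable.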
3139 project
3140 .update(cx, |project, cx| {
3141 project.rename_entry(dir_id, Path::new("b"), cx)
3142 })
3143 .unwrap()
3144 .await
3145 .unwrap();
3146 cx.executor().run_until_parked();
3147
3148 assert_eq!(id_for_path("b", cx), dir_id);
3149 assert_eq!(id_for_path("b/file1", cx), file_id);
3150 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3151}
3152
3153#[gpui::test]
3154async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3155 init_test(cx);
3156
3157 let fs = FakeFs::new(cx.executor());
3158 fs.insert_tree(
3159 "/dir",
3160 json!({
3161 "a.txt": "a-contents",
3162 "b.txt": "b-contents",
3163 }),
3164 )
3165 .await;
3166
3167 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3168
3169 // Spawn multiple tasks to open paths, repeating some paths.
3170 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3171 (
3172 p.open_local_buffer("/dir/a.txt", cx),
3173 p.open_local_buffer("/dir/b.txt", cx),
3174 p.open_local_buffer("/dir/a.txt", cx),
3175 )
3176 });
3177
3178 let buffer_a_1 = buffer_a_1.await.unwrap();
3179 let buffer_a_2 = buffer_a_2.await.unwrap();
3180 let buffer_b = buffer_b.await.unwrap();
3181 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3182 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3183
3184 // There is only one buffer per path.
3185 let buffer_a_id = buffer_a_1.entity_id();
3186 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3187
3188 // Open the same path again while it is still open.
3189 drop(buffer_a_1);
3190 let buffer_a_3 = project
3191 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3192 .await
3193 .unwrap();
3194
3195 // There's still only one buffer per path.
3196 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3197}
3198
3199#[gpui::test]
3200async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3201 init_test(cx);
3202
3203 let fs = FakeFs::new(cx.executor());
3204 fs.insert_tree(
3205 "/dir",
3206 json!({
3207 "file1": "abc",
3208 "file2": "def",
3209 "file3": "ghi",
3210 }),
3211 )
3212 .await;
3213
3214 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3215
3216 let buffer1 = project
3217 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3218 .await
3219 .unwrap();
3220 let events = Arc::new(Mutex::new(Vec::new()));
3221
    // Initially, the buffer isn't dirty.
3223 buffer1.update(cx, |buffer, cx| {
3224 cx.subscribe(&buffer1, {
3225 let events = events.clone();
3226 move |_, _, event, _| match event {
3227 BufferEvent::Operation(_) => {}
3228 _ => events.lock().push(event.clone()),
3229 }
3230 })
3231 .detach();
3232
3233 assert!(!buffer.is_dirty());
3234 assert!(events.lock().is_empty());
3235
3236 buffer.edit([(1..2, "")], None, cx);
3237 });
3238
    // After the first edit, the buffer is dirty and emits Edited and
    // DirtyChanged events.
3240 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "ac");
3242 assert!(buffer.is_dirty());
3243 assert_eq!(
3244 *events.lock(),
3245 &[language::Event::Edited, language::Event::DirtyChanged]
3246 );
3247 events.lock().clear();
3248 buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
3249 });
3250
    // After saving, the buffer is not dirty and emits a Saved event.
3252 buffer1.update(cx, |buffer, cx| {
3253 assert!(!buffer.is_dirty());
3254 assert_eq!(*events.lock(), &[language::Event::Saved]);
3255 events.lock().clear();
3256
3257 buffer.edit([(1..1, "B")], None, cx);
3258 buffer.edit([(2..2, "D")], None, cx);
3259 });
3260
    // After editing again, the buffer is dirty and emits another DirtyChanged
    // event along with the Edited events.
3262 buffer1.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "aBDc");
3264 assert!(buffer.is_dirty());
3265 assert_eq!(
3266 *events.lock(),
3267 &[
3268 language::Event::Edited,
3269 language::Event::DirtyChanged,
3270 language::Event::Edited,
3271 ],
3272 );
3273 events.lock().clear();
3274
3275 // After restoring the buffer to its previously-saved state,
3276 // the buffer is not considered dirty anymore.
3277 buffer.edit([(1..3, "")], None, cx);
        assert_eq!(buffer.text(), "ac");
3279 assert!(!buffer.is_dirty());
3280 });
3281
3282 assert_eq!(
3283 *events.lock(),
3284 &[language::Event::Edited, language::Event::DirtyChanged]
3285 );
3286
3287 // When a file is deleted, the buffer is considered dirty.
3288 let events = Arc::new(Mutex::new(Vec::new()));
3289 let buffer2 = project
3290 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3291 .await
3292 .unwrap();
3293 buffer2.update(cx, |_, cx| {
3294 cx.subscribe(&buffer2, {
3295 let events = events.clone();
3296 move |_, _, event, _| events.lock().push(event.clone())
3297 })
3298 .detach();
3299 });
3300
3301 fs.remove_file("/dir/file2".as_ref(), Default::default())
3302 .await
3303 .unwrap();
3304 cx.executor().run_until_parked();
3305 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3306 assert_eq!(
3307 *events.lock(),
3308 &[
3309 language::Event::DirtyChanged,
3310 language::Event::FileHandleChanged
3311 ]
3312 );
3313
    // When a file that is already dirty is deleted, no additional
    // DirtyChanged event is emitted.
3315 let events = Arc::new(Mutex::new(Vec::new()));
3316 let buffer3 = project
3317 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3318 .await
3319 .unwrap();
3320 buffer3.update(cx, |_, cx| {
3321 cx.subscribe(&buffer3, {
3322 let events = events.clone();
3323 move |_, _, event, _| events.lock().push(event.clone())
3324 })
3325 .detach();
3326 });
3327
3328 buffer3.update(cx, |buffer, cx| {
3329 buffer.edit([(0..0, "x")], None, cx);
3330 });
3331 events.lock().clear();
3332 fs.remove_file("/dir/file3".as_ref(), Default::default())
3333 .await
3334 .unwrap();
3335 cx.executor().run_until_parked();
3336 assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
3337 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3338}
3339
3340#[gpui::test]
3341async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3342 init_test(cx);
3343
3344 let initial_contents = "aaa\nbbbbb\nc\n";
3345 let fs = FakeFs::new(cx.executor());
3346 fs.insert_tree(
3347 "/dir",
3348 json!({
3349 "the-file": initial_contents,
3350 }),
3351 )
3352 .await;
3353 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3354 let buffer = project
3355 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3356 .await
3357 .unwrap();
3358
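    // Create an anchor on each of the first three lines so we can check how
    // they are relocated when the file is reloaded from disk.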
3359 let anchors = (0..3)
3360 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3361 .collect::<Vec<_>>();
3362
3363 // Change the file on disk, adding two new lines of text, and removing
3364 // one line.
3365 buffer.update(cx, |buffer, _| {
3366 assert!(!buffer.is_dirty());
3367 assert!(!buffer.has_conflict());
3368 });
3369 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3370 fs.save(
3371 "/dir/the-file".as_ref(),
3372 &new_contents.into(),
3373 LineEnding::Unix,
3374 )
3375 .await
3376 .unwrap();
3377
3378 // Because the buffer was not modified, it is reloaded from disk. Its
3379 // contents are edited according to the diff between the old and new
3380 // file contents.
3381 cx.executor().run_until_parked();
3382 buffer.update(cx, |buffer, _| {
3383 assert_eq!(buffer.text(), new_contents);
3384 assert!(!buffer.is_dirty());
3385 assert!(!buffer.has_conflict());
3386
3387 let anchor_positions = anchors
3388 .iter()
3389 .map(|anchor| anchor.to_point(&*buffer))
3390 .collect::<Vec<_>>();
3391 assert_eq!(
3392 anchor_positions,
3393 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3394 );
3395 });
3396
3397 // Modify the buffer
3398 buffer.update(cx, |buffer, cx| {
3399 buffer.edit([(0..0, " ")], None, cx);
3400 assert!(buffer.is_dirty());
3401 assert!(!buffer.has_conflict());
3402 });
3403
3404 // Change the file on disk again, adding blank lines to the beginning.
3405 fs.save(
3406 "/dir/the-file".as_ref(),
3407 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3408 LineEnding::Unix,
3409 )
3410 .await
3411 .unwrap();
3412
3413 // Because the buffer is modified, it doesn't reload from disk, but is
3414 // marked as having a conflict.
3415 cx.executor().run_until_parked();
3416 buffer.update(cx, |buffer, _| {
3417 assert!(buffer.has_conflict());
3418 });
3419}
3420
3421#[gpui::test]
3422async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3423 init_test(cx);
3424
3425 let fs = FakeFs::new(cx.executor());
3426 fs.insert_tree(
3427 "/dir",
3428 json!({
3429 "file1": "a\nb\nc\n",
3430 "file2": "one\r\ntwo\r\nthree\r\n",
3431 }),
3432 )
3433 .await;
3434
3435 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3436 let buffer1 = project
3437 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3438 .await
3439 .unwrap();
3440 let buffer2 = project
3441 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3442 .await
3443 .unwrap();
3444
3445 buffer1.update(cx, |buffer, _| {
3446 assert_eq!(buffer.text(), "a\nb\nc\n");
3447 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3448 });
3449 buffer2.update(cx, |buffer, _| {
3450 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3451 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3452 });
3453
    // Change a file's line endings on disk from Unix to Windows. The buffer's
    // state updates correctly.
3456 fs.save(
3457 "/dir/file1".as_ref(),
3458 &"aaa\nb\nc\n".into(),
3459 LineEnding::Windows,
3460 )
3461 .await
3462 .unwrap();
3463 cx.executor().run_until_parked();
3464 buffer1.update(cx, |buffer, _| {
3465 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3466 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3467 });
3468
    // Save a file with Windows line endings. The file is written correctly.
3470 buffer2.update(cx, |buffer, cx| {
3471 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3472 });
3473 project
3474 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3475 .await
3476 .unwrap();
3477 assert_eq!(
3478 fs.load("/dir/file2".as_ref()).await.unwrap(),
3479 "one\r\ntwo\r\nthree\r\nfour\r\n",
3480 );
3481}
3482
3483#[gpui::test]
3484async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
3485 init_test(cx);
3486
3487 let fs = FakeFs::new(cx.executor());
3488 fs.insert_tree(
3489 "/the-dir",
3490 json!({
3491 "a.rs": "
3492 fn foo(mut v: Vec<usize>) {
3493 for x in &v {
3494 v.push(1);
3495 }
3496 }
3497 "
3498 .unindent(),
3499 }),
3500 )
3501 .await;
3502
3503 let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
3504 let buffer = project
3505 .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
3506 .await
3507 .unwrap();
3508
3509 let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
3510 let message = lsp::PublishDiagnosticsParams {
3511 uri: buffer_uri.clone(),
3512 diagnostics: vec![
3513 lsp::Diagnostic {
3514 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3515 severity: Some(DiagnosticSeverity::WARNING),
3516 message: "error 1".to_string(),
3517 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3518 location: lsp::Location {
3519 uri: buffer_uri.clone(),
3520 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3521 },
3522 message: "error 1 hint 1".to_string(),
3523 }]),
3524 ..Default::default()
3525 },
3526 lsp::Diagnostic {
3527 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3528 severity: Some(DiagnosticSeverity::HINT),
3529 message: "error 1 hint 1".to_string(),
3530 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3531 location: lsp::Location {
3532 uri: buffer_uri.clone(),
3533 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
3534 },
3535 message: "original diagnostic".to_string(),
3536 }]),
3537 ..Default::default()
3538 },
3539 lsp::Diagnostic {
3540 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3541 severity: Some(DiagnosticSeverity::ERROR),
3542 message: "error 2".to_string(),
3543 related_information: Some(vec![
3544 lsp::DiagnosticRelatedInformation {
3545 location: lsp::Location {
3546 uri: buffer_uri.clone(),
3547 range: lsp::Range::new(
3548 lsp::Position::new(1, 13),
3549 lsp::Position::new(1, 15),
3550 ),
3551 },
3552 message: "error 2 hint 1".to_string(),
3553 },
3554 lsp::DiagnosticRelatedInformation {
3555 location: lsp::Location {
3556 uri: buffer_uri.clone(),
3557 range: lsp::Range::new(
3558 lsp::Position::new(1, 13),
3559 lsp::Position::new(1, 15),
3560 ),
3561 },
3562 message: "error 2 hint 2".to_string(),
3563 },
3564 ]),
3565 ..Default::default()
3566 },
3567 lsp::Diagnostic {
3568 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3569 severity: Some(DiagnosticSeverity::HINT),
3570 message: "error 2 hint 1".to_string(),
3571 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3572 location: lsp::Location {
3573 uri: buffer_uri.clone(),
3574 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3575 },
3576 message: "original diagnostic".to_string(),
3577 }]),
3578 ..Default::default()
3579 },
3580 lsp::Diagnostic {
3581 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
3582 severity: Some(DiagnosticSeverity::HINT),
3583 message: "error 2 hint 2".to_string(),
3584 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
3585 location: lsp::Location {
3586 uri: buffer_uri,
3587 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
3588 },
3589 message: "original diagnostic".to_string(),
3590 }]),
3591 ..Default::default()
3592 },
3593 ],
3594 version: None,
3595 };
3596
3597 project
3598 .update(cx, |p, cx| {
3599 p.update_diagnostics(LanguageServerId(0), message, &[], cx)
3600 })
3601 .unwrap();
3602 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
3603
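    // Diagnostics are grouped with their related-information hints: group 0
    // contains "error 2" and its hints, group 1 contains "error 1" and its hint.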
3604 assert_eq!(
3605 buffer
3606 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
3607 .collect::<Vec<_>>(),
3608 &[
3609 DiagnosticEntry {
3610 range: Point::new(1, 8)..Point::new(1, 9),
3611 diagnostic: Diagnostic {
3612 severity: DiagnosticSeverity::WARNING,
3613 message: "error 1".to_string(),
3614 group_id: 1,
3615 is_primary: true,
3616 ..Default::default()
3617 }
3618 },
3619 DiagnosticEntry {
3620 range: Point::new(1, 8)..Point::new(1, 9),
3621 diagnostic: Diagnostic {
3622 severity: DiagnosticSeverity::HINT,
3623 message: "error 1 hint 1".to_string(),
3624 group_id: 1,
3625 is_primary: false,
3626 ..Default::default()
3627 }
3628 },
3629 DiagnosticEntry {
3630 range: Point::new(1, 13)..Point::new(1, 15),
3631 diagnostic: Diagnostic {
3632 severity: DiagnosticSeverity::HINT,
3633 message: "error 2 hint 1".to_string(),
3634 group_id: 0,
3635 is_primary: false,
3636 ..Default::default()
3637 }
3638 },
3639 DiagnosticEntry {
3640 range: Point::new(1, 13)..Point::new(1, 15),
3641 diagnostic: Diagnostic {
3642 severity: DiagnosticSeverity::HINT,
3643 message: "error 2 hint 2".to_string(),
3644 group_id: 0,
3645 is_primary: false,
3646 ..Default::default()
3647 }
3648 },
3649 DiagnosticEntry {
3650 range: Point::new(2, 8)..Point::new(2, 17),
3651 diagnostic: Diagnostic {
3652 severity: DiagnosticSeverity::ERROR,
3653 message: "error 2".to_string(),
3654 group_id: 0,
3655 is_primary: true,
3656 ..Default::default()
3657 }
3658 }
3659 ]
3660 );
3661
3662 assert_eq!(
3663 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
3664 &[
3665 DiagnosticEntry {
3666 range: Point::new(1, 13)..Point::new(1, 15),
3667 diagnostic: Diagnostic {
3668 severity: DiagnosticSeverity::HINT,
3669 message: "error 2 hint 1".to_string(),
3670 group_id: 0,
3671 is_primary: false,
3672 ..Default::default()
3673 }
3674 },
3675 DiagnosticEntry {
3676 range: Point::new(1, 13)..Point::new(1, 15),
3677 diagnostic: Diagnostic {
3678 severity: DiagnosticSeverity::HINT,
3679 message: "error 2 hint 2".to_string(),
3680 group_id: 0,
3681 is_primary: false,
3682 ..Default::default()
3683 }
3684 },
3685 DiagnosticEntry {
3686 range: Point::new(2, 8)..Point::new(2, 17),
3687 diagnostic: Diagnostic {
3688 severity: DiagnosticSeverity::ERROR,
3689 message: "error 2".to_string(),
3690 group_id: 0,
3691 is_primary: true,
3692 ..Default::default()
3693 }
3694 }
3695 ]
3696 );
3697
3698 assert_eq!(
3699 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
3700 &[
3701 DiagnosticEntry {
3702 range: Point::new(1, 8)..Point::new(1, 9),
3703 diagnostic: Diagnostic {
3704 severity: DiagnosticSeverity::WARNING,
3705 message: "error 1".to_string(),
3706 group_id: 1,
3707 is_primary: true,
3708 ..Default::default()
3709 }
3710 },
3711 DiagnosticEntry {
3712 range: Point::new(1, 8)..Point::new(1, 9),
3713 diagnostic: Diagnostic {
3714 severity: DiagnosticSeverity::HINT,
3715 message: "error 1 hint 1".to_string(),
3716 group_id: 1,
3717 is_primary: false,
3718 ..Default::default()
3719 }
3720 },
3721 ]
3722 );
3723}
3724
3725#[gpui::test]
3726async fn test_rename(cx: &mut gpui::TestAppContext) {
3727 init_test(cx);
3728
3729 let fs = FakeFs::new(cx.executor());
3730 fs.insert_tree(
3731 "/dir",
3732 json!({
3733 "one.rs": "const ONE: usize = 1;",
3734 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
3735 }),
3736 )
3737 .await;
3738
3739 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3740
3741 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3742 language_registry.add(rust_lang());
3743 let mut fake_servers = language_registry.register_fake_lsp_adapter(
3744 "Rust",
3745 FakeLspAdapter {
3746 capabilities: lsp::ServerCapabilities {
3747 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
3748 prepare_provider: Some(true),
3749 work_done_progress_options: Default::default(),
3750 })),
3751 ..Default::default()
3752 },
3753 ..Default::default()
3754 },
3755 );
3756
3757 let buffer = project
3758 .update(cx, |project, cx| {
3759 project.open_local_buffer("/dir/one.rs", cx)
3760 })
3761 .await
3762 .unwrap();
3763
3764 let fake_server = fake_servers.next().await.unwrap();
3765
3766 let response = project.update(cx, |project, cx| {
3767 project.prepare_rename(buffer.clone(), 7, cx)
3768 });
3769 fake_server
3770 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
3771 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
3772 assert_eq!(params.position, lsp::Position::new(0, 7));
3773 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
3774 lsp::Position::new(0, 6),
3775 lsp::Position::new(0, 9),
3776 ))))
3777 })
3778 .next()
3779 .await
3780 .unwrap();
3781 let range = response.await.unwrap().unwrap();
3782 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
3783 assert_eq!(range, 6..9);
3784
3785 let response = project.update(cx, |project, cx| {
3786 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
3787 });
3788 fake_server
3789 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
3790 assert_eq!(
3791 params.text_document_position.text_document.uri.as_str(),
3792 "file:///dir/one.rs"
3793 );
3794 assert_eq!(
3795 params.text_document_position.position,
3796 lsp::Position::new(0, 7)
3797 );
3798 assert_eq!(params.new_name, "THREE");
3799 Ok(Some(lsp::WorkspaceEdit {
3800 changes: Some(
3801 [
3802 (
3803 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
3804 vec![lsp::TextEdit::new(
3805 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
3806 "THREE".to_string(),
3807 )],
3808 ),
3809 (
3810 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
3811 vec![
3812 lsp::TextEdit::new(
3813 lsp::Range::new(
3814 lsp::Position::new(0, 24),
3815 lsp::Position::new(0, 27),
3816 ),
3817 "THREE".to_string(),
3818 ),
3819 lsp::TextEdit::new(
3820 lsp::Range::new(
3821 lsp::Position::new(0, 35),
3822 lsp::Position::new(0, 38),
3823 ),
3824 "THREE".to_string(),
3825 ),
3826 ],
3827 ),
3828 ]
3829 .into_iter()
3830 .collect(),
3831 ),
3832 ..Default::default()
3833 }))
3834 })
3835 .next()
3836 .await
3837 .unwrap();
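    // The rename produces a project transaction with one entry per edited buffer.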
3838 let mut transaction = response.await.unwrap().0;
3839 assert_eq!(transaction.len(), 2);
3840 assert_eq!(
3841 transaction
3842 .remove_entry(&buffer)
3843 .unwrap()
3844 .0
3845 .update(cx, |buffer, _| buffer.text()),
3846 "const THREE: usize = 1;"
3847 );
3848 assert_eq!(
3849 transaction
3850 .into_keys()
3851 .next()
3852 .unwrap()
3853 .update(cx, |buffer, _| buffer.text()),
3854 "const TWO: usize = one::THREE + one::THREE;"
3855 );
3856}
3857
3858#[gpui::test]
3859async fn test_search(cx: &mut gpui::TestAppContext) {
3860 init_test(cx);
3861
3862 let fs = FakeFs::new(cx.executor());
3863 fs.insert_tree(
3864 "/dir",
3865 json!({
3866 "one.rs": "const ONE: usize = 1;",
3867 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3868 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3869 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3870 }),
3871 )
3872 .await;
3873 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3874 assert_eq!(
3875 search(
3876 &project,
3877 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3878 cx
3879 )
3880 .await
3881 .unwrap(),
3882 HashMap::from_iter([
3883 ("dir/two.rs".to_string(), vec![6..9]),
3884 ("dir/three.rs".to_string(), vec![37..40])
3885 ])
3886 );
3887
3888 let buffer_4 = project
3889 .update(cx, |project, cx| {
3890 project.open_local_buffer("/dir/four.rs", cx)
3891 })
3892 .await
3893 .unwrap();
3894 buffer_4.update(cx, |buffer, cx| {
3895 let text = "two::TWO";
3896 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3897 });
3898
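    // The search also reflects unsaved changes in open buffers.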
3899 assert_eq!(
3900 search(
3901 &project,
3902 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3903 cx
3904 )
3905 .await
3906 .unwrap(),
3907 HashMap::from_iter([
3908 ("dir/two.rs".to_string(), vec![6..9]),
3909 ("dir/three.rs".to_string(), vec![37..40]),
3910 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3911 ])
3912 );
3913}
3914
3915#[gpui::test]
3916async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3917 init_test(cx);
3918
3919 let search_query = "file";
3920
3921 let fs = FakeFs::new(cx.executor());
3922 fs.insert_tree(
3923 "/dir",
3924 json!({
3925 "one.rs": r#"// Rust file one"#,
3926 "one.ts": r#"// TypeScript file one"#,
3927 "two.rs": r#"// Rust file two"#,
3928 "two.ts": r#"// TypeScript file two"#,
3929 }),
3930 )
3931 .await;
3932 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3933
3934 assert!(
3935 search(
3936 &project,
3937 SearchQuery::text(
3938 search_query,
3939 false,
3940 true,
3941 false,
3942 vec![PathMatcher::new("*.odd").unwrap()],
3943 Vec::new()
3944 )
3945 .unwrap(),
3946 cx
3947 )
3948 .await
3949 .unwrap()
3950 .is_empty(),
3951 "If no inclusions match, no files should be returned"
3952 );
3953
3954 assert_eq!(
3955 search(
3956 &project,
3957 SearchQuery::text(
3958 search_query,
3959 false,
3960 true,
3961 false,
3962 vec![PathMatcher::new("*.rs").unwrap()],
3963 Vec::new()
3964 )
3965 .unwrap(),
3966 cx
3967 )
3968 .await
3969 .unwrap(),
3970 HashMap::from_iter([
3971 ("dir/one.rs".to_string(), vec![8..12]),
3972 ("dir/two.rs".to_string(), vec![8..12]),
3973 ]),
3974 "Rust only search should give only Rust files"
3975 );
3976
3977 assert_eq!(
3978 search(
3979 &project,
3980 SearchQuery::text(
3981 search_query,
3982 false,
3983 true,
3984 false,
3985 vec![
3986 PathMatcher::new("*.ts").unwrap(),
3987 PathMatcher::new("*.odd").unwrap(),
3988 ],
3989 Vec::new()
3990 ).unwrap(),
3991 cx
3992 )
3993 .await
3994 .unwrap(),
3995 HashMap::from_iter([
3996 ("dir/one.ts".to_string(), vec![14..18]),
3997 ("dir/two.ts".to_string(), vec![14..18]),
3998 ]),
3999 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4000 );
4001
4002 assert_eq!(
4003 search(
4004 &project,
4005 SearchQuery::text(
4006 search_query,
4007 false,
4008 true,
4009 false,
4010 vec![
4011 PathMatcher::new("*.rs").unwrap(),
4012 PathMatcher::new("*.ts").unwrap(),
4013 PathMatcher::new("*.odd").unwrap(),
4014 ],
4015 Vec::new()
4016 ).unwrap(),
4017 cx
4018 )
4019 .await
4020 .unwrap(),
4021 HashMap::from_iter([
4022 ("dir/two.ts".to_string(), vec![14..18]),
4023 ("dir/one.rs".to_string(), vec![8..12]),
4024 ("dir/one.ts".to_string(), vec![14..18]),
4025 ("dir/two.rs".to_string(), vec![8..12]),
4026 ]),
        "Rust and TypeScript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4028 );
4029}
4030
4031#[gpui::test]
4032async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4033 init_test(cx);
4034
4035 let search_query = "file";
4036
4037 let fs = FakeFs::new(cx.executor());
4038 fs.insert_tree(
4039 "/dir",
4040 json!({
4041 "one.rs": r#"// Rust file one"#,
4042 "one.ts": r#"// TypeScript file one"#,
4043 "two.rs": r#"// Rust file two"#,
4044 "two.ts": r#"// TypeScript file two"#,
4045 }),
4046 )
4047 .await;
4048 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4049
4050 assert_eq!(
4051 search(
4052 &project,
4053 SearchQuery::text(
4054 search_query,
4055 false,
4056 true,
4057 false,
4058 Vec::new(),
4059 vec![PathMatcher::new("*.odd").unwrap()],
4060 )
4061 .unwrap(),
4062 cx
4063 )
4064 .await
4065 .unwrap(),
4066 HashMap::from_iter([
4067 ("dir/one.rs".to_string(), vec![8..12]),
4068 ("dir/one.ts".to_string(), vec![14..18]),
4069 ("dir/two.rs".to_string(), vec![8..12]),
4070 ("dir/two.ts".to_string(), vec![14..18]),
4071 ]),
4072 "If no exclusions match, all files should be returned"
4073 );
4074
4075 assert_eq!(
4076 search(
4077 &project,
4078 SearchQuery::text(
4079 search_query,
4080 false,
4081 true,
4082 false,
4083 Vec::new(),
4084 vec![PathMatcher::new("*.rs").unwrap()],
4085 )
4086 .unwrap(),
4087 cx
4088 )
4089 .await
4090 .unwrap(),
4091 HashMap::from_iter([
4092 ("dir/one.ts".to_string(), vec![14..18]),
4093 ("dir/two.ts".to_string(), vec![14..18]),
4094 ]),
4095 "Rust exclusion search should give only TypeScript files"
4096 );
4097
4098 assert_eq!(
4099 search(
4100 &project,
4101 SearchQuery::text(
4102 search_query,
4103 false,
4104 true,
4105 false,
4106 Vec::new(),
4107 vec![
4108 PathMatcher::new("*.ts").unwrap(),
4109 PathMatcher::new("*.odd").unwrap(),
4110 ],
4111 ).unwrap(),
4112 cx
4113 )
4114 .await
4115 .unwrap(),
4116 HashMap::from_iter([
4117 ("dir/one.rs".to_string(), vec![8..12]),
4118 ("dir/two.rs".to_string(), vec![8..12]),
4119 ]),
4120 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4121 );
4122
4123 assert!(
4124 search(
4125 &project,
4126 SearchQuery::text(
4127 search_query,
4128 false,
4129 true,
4130 false,
4131 Vec::new(),
4132 vec![
4133 PathMatcher::new("*.rs").unwrap(),
4134 PathMatcher::new("*.ts").unwrap(),
4135 PathMatcher::new("*.odd").unwrap(),
4136 ],
4137 ).unwrap(),
4138 cx
4139 )
4140 .await
4141 .unwrap().is_empty(),
        "Rust and TypeScript exclusion should give no files, even if other exclusions don't match anything"
4143 );
4144}
4145
4146#[gpui::test]
4147async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4148 init_test(cx);
4149
4150 let search_query = "file";
4151
4152 let fs = FakeFs::new(cx.executor());
4153 fs.insert_tree(
4154 "/dir",
4155 json!({
4156 "one.rs": r#"// Rust file one"#,
4157 "one.ts": r#"// TypeScript file one"#,
4158 "two.rs": r#"// Rust file two"#,
4159 "two.ts": r#"// TypeScript file two"#,
4160 }),
4161 )
4162 .await;
4163 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4164
4165 assert!(
4166 search(
4167 &project,
4168 SearchQuery::text(
4169 search_query,
4170 false,
4171 true,
4172 false,
4173 vec![PathMatcher::new("*.odd").unwrap()],
4174 vec![PathMatcher::new("*.odd").unwrap()],
4175 )
4176 .unwrap(),
4177 cx
4178 )
4179 .await
4180 .unwrap()
4181 .is_empty(),
        "If neither inclusions nor exclusions match anything, no files should be returned"
4183 );
4184
4185 assert!(
4186 search(
4187 &project,
4188 SearchQuery::text(
4189 search_query,
4190 false,
4191 true,
4192 false,
4193 vec![PathMatcher::new("*.ts").unwrap()],
4194 vec![PathMatcher::new("*.ts").unwrap()],
4195 ).unwrap(),
4196 cx
4197 )
4198 .await
4199 .unwrap()
4200 .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return no files."
4202 );
4203
4204 assert!(
4205 search(
4206 &project,
4207 SearchQuery::text(
4208 search_query,
4209 false,
4210 true,
4211 false,
4212 vec![
4213 PathMatcher::new("*.ts").unwrap(),
4214 PathMatcher::new("*.odd").unwrap()
4215 ],
4216 vec![
4217 PathMatcher::new("*.ts").unwrap(),
4218 PathMatcher::new("*.odd").unwrap()
4219 ],
4220 )
4221 .unwrap(),
4222 cx
4223 )
4224 .await
4225 .unwrap()
4226 .is_empty(),
        "Extra non-matching inclusions and exclusions should not change the outcome: exclusions still win and nothing is returned."
4228 );
4229
4230 assert_eq!(
4231 search(
4232 &project,
4233 SearchQuery::text(
4234 search_query,
4235 false,
4236 true,
4237 false,
4238 vec![
4239 PathMatcher::new("*.ts").unwrap(),
4240 PathMatcher::new("*.odd").unwrap()
4241 ],
4242 vec![
4243 PathMatcher::new("*.rs").unwrap(),
4244 PathMatcher::new("*.odd").unwrap()
4245 ],
4246 )
4247 .unwrap(),
4248 cx
4249 )
4250 .await
4251 .unwrap(),
4252 HashMap::from_iter([
4253 ("dir/one.ts".to_string(), vec![14..18]),
4254 ("dir/two.ts".to_string(), vec![14..18]),
4255 ]),
4256 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4257 );
4258}
4259
4260#[gpui::test]
4261async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4262 init_test(cx);
4263
4264 let fs = FakeFs::new(cx.executor());
4265 fs.insert_tree(
4266 "/worktree-a",
4267 json!({
4268 "haystack.rs": r#"// NEEDLE"#,
4269 "haystack.ts": r#"// NEEDLE"#,
4270 }),
4271 )
4272 .await;
4273 fs.insert_tree(
4274 "/worktree-b",
4275 json!({
4276 "haystack.rs": r#"// NEEDLE"#,
4277 "haystack.ts": r#"// NEEDLE"#,
4278 }),
4279 )
4280 .await;
4281
4282 let project = Project::test(
4283 fs.clone(),
4284 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4285 cx,
4286 )
4287 .await;
4288
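    // Inclusion patterns may be scoped to a single worktree ("worktree-a/*.rs") or
    // apply across all worktrees ("*.ts").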
4289 assert_eq!(
4290 search(
4291 &project,
4292 SearchQuery::text(
4293 "NEEDLE",
4294 false,
4295 true,
4296 false,
4297 vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
4298 Vec::new()
4299 )
4300 .unwrap(),
4301 cx
4302 )
4303 .await
4304 .unwrap(),
4305 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4306 "should only return results from included worktree"
4307 );
4308 assert_eq!(
4309 search(
4310 &project,
4311 SearchQuery::text(
4312 "NEEDLE",
4313 false,
4314 true,
4315 false,
4316 vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
4317 Vec::new()
4318 )
4319 .unwrap(),
4320 cx
4321 )
4322 .await
4323 .unwrap(),
4324 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4325 "should only return results from included worktree"
4326 );
4327
4328 assert_eq!(
4329 search(
4330 &project,
4331 SearchQuery::text(
4332 "NEEDLE",
4333 false,
4334 true,
4335 false,
4336 vec![PathMatcher::new("*.ts").unwrap()],
4337 Vec::new()
4338 )
4339 .unwrap(),
4340 cx
4341 )
4342 .await
4343 .unwrap(),
4344 HashMap::from_iter([
4345 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4346 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4347 ]),
4348 "should return results from both worktrees"
4349 );
4350}
4351
4352#[gpui::test]
4353async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4354 init_test(cx);
4355
4356 let fs = FakeFs::new(cx.background_executor.clone());
4357 fs.insert_tree(
4358 "/dir",
4359 json!({
4360 ".git": {},
4361 ".gitignore": "**/target\n/node_modules\n",
4362 "target": {
4363 "index.txt": "index_key:index_value"
4364 },
4365 "node_modules": {
4366 "eslint": {
4367 "index.ts": "const eslint_key = 'eslint value'",
4368 "package.json": r#"{ "some_key": "some value" }"#,
4369 },
4370 "prettier": {
4371 "index.ts": "const prettier_key = 'prettier value'",
4372 "package.json": r#"{ "other_key": "other value" }"#,
4373 },
4374 },
4375 "package.json": r#"{ "main_key": "main value" }"#,
4376 }),
4377 )
4378 .await;
4379 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4380
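    // The first query skips gitignored files; the later ones pass `true` for the
    // include-ignored flag (the fourth argument) so ignored paths are searched as well.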
4381 let query = "key";
4382 assert_eq!(
4383 search(
4384 &project,
4385 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4386 cx
4387 )
4388 .await
4389 .unwrap(),
4390 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should contain the query"
4392 );
4393
4394 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4395 assert_eq!(
4396 search(
4397 &project,
4398 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4399 cx
4400 )
4401 .await
4402 .unwrap(),
4403 HashMap::from_iter([
4404 ("dir/package.json".to_string(), vec![8..11]),
4405 ("dir/target/index.txt".to_string(), vec![6..9]),
4406 (
4407 "dir/node_modules/prettier/package.json".to_string(),
4408 vec![9..12]
4409 ),
4410 (
4411 "dir/node_modules/prettier/index.ts".to_string(),
4412 vec![15..18]
4413 ),
4414 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4415 (
4416 "dir/node_modules/eslint/package.json".to_string(),
4417 vec![8..11]
4418 ),
4419 ]),
        "Unrestricted search including ignored directories should find every file containing the query"
4421 );
4422
4423 let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
4424 let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
4425 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4426 assert_eq!(
4427 search(
4428 &project,
4429 SearchQuery::text(
4430 query,
4431 false,
4432 false,
4433 true,
4434 files_to_include,
4435 files_to_exclude,
4436 )
4437 .unwrap(),
4438 cx
4439 )
4440 .await
4441 .unwrap(),
4442 HashMap::from_iter([(
4443 "dir/node_modules/prettier/package.json".to_string(),
4444 vec![9..12]
4445 )]),
        "A search that includes the ignored prettier directory but excludes TS files should find only one file"
4447 );
4448}
4449
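// `glob_literal_prefix` is expected to return the leading part of a glob pattern that
// contains no glob metacharacters (the entire path when the pattern is fully literal).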
4450#[test]
4451fn test_glob_literal_prefix() {
4452 assert_eq!(glob_literal_prefix("**/*.js"), "");
4453 assert_eq!(glob_literal_prefix("node_modules/**/*.js"), "node_modules");
4454 assert_eq!(glob_literal_prefix("foo/{bar,baz}.js"), "foo");
4455 assert_eq!(glob_literal_prefix("foo/bar/baz.js"), "foo/bar/baz.js");
4456}
4457
4458#[gpui::test]
4459async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4460 init_test(cx);
4461
4462 let fs = FakeFs::new(cx.executor().clone());
4463 fs.insert_tree(
4464 "/one/two",
4465 json!({
4466 "three": {
4467 "a.txt": "",
4468 "four": {}
4469 },
4470 "c.rs": ""
4471 }),
4472 )
4473 .await;
4474
4475 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
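    // A name that merely contains dots ("b..") is allowed; only `..` path components
    // are rejected below.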
4476 project
4477 .update(cx, |project, cx| {
4478 let id = project.worktrees().next().unwrap().read(cx).id();
4479 project.create_entry((id, "b.."), true, cx)
4480 })
4481 .unwrap()
4482 .await
4483 .unwrap();
4484
4485 // Can't create paths outside the project
4486 let result = project
4487 .update(cx, |project, cx| {
4488 let id = project.worktrees().next().unwrap().read(cx).id();
4489 project.create_entry((id, "../../boop"), true, cx)
4490 })
4491 .await;
4492 assert!(result.is_err());
4493
4494 // Can't create paths with '..'
4495 let result = project
4496 .update(cx, |project, cx| {
4497 let id = project.worktrees().next().unwrap().read(cx).id();
4498 project.create_entry((id, "four/../beep"), true, cx)
4499 })
4500 .await;
4501 assert!(result.is_err());
4502
4503 assert_eq!(
4504 fs.paths(true),
4505 vec![
4506 PathBuf::from("/"),
4507 PathBuf::from("/one"),
4508 PathBuf::from("/one/two"),
4509 PathBuf::from("/one/two/c.rs"),
4510 PathBuf::from("/one/two/three"),
4511 PathBuf::from("/one/two/three/a.txt"),
4512 PathBuf::from("/one/two/three/b.."),
4513 PathBuf::from("/one/two/three/four"),
4514 ]
4515 );
4516
4517 // And we cannot open buffers with '..'
4518 let result = project
4519 .update(cx, |project, cx| {
4520 let id = project.worktrees().next().unwrap().read(cx).id();
4521 project.open_buffer((id, "../c.rs"), cx)
4522 })
4523 .await;
4524 assert!(result.is_err())
4525}
4526
4527#[gpui::test]
4528async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4529 init_test(cx);
4530
4531 let fs = FakeFs::new(cx.executor());
4532 fs.insert_tree(
4533 "/dir",
4534 json!({
4535 "a.tsx": "a",
4536 }),
4537 )
4538 .await;
4539
4540 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4541
4542 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4543 language_registry.add(tsx_lang());
4544 let language_server_names = [
4545 "TypeScriptServer",
4546 "TailwindServer",
4547 "ESLintServer",
4548 "NoHoverCapabilitiesServer",
4549 ];
4550 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4551 "tsx",
4552 true,
4553 FakeLspAdapter {
4554 name: &language_server_names[0],
4555 capabilities: lsp::ServerCapabilities {
4556 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4557 ..lsp::ServerCapabilities::default()
4558 },
4559 ..FakeLspAdapter::default()
4560 },
4561 );
4562 let _a = language_registry.register_specific_fake_lsp_adapter(
4563 "tsx",
4564 false,
4565 FakeLspAdapter {
4566 name: &language_server_names[1],
4567 capabilities: lsp::ServerCapabilities {
4568 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4569 ..lsp::ServerCapabilities::default()
4570 },
4571 ..FakeLspAdapter::default()
4572 },
4573 );
4574 let _b = language_registry.register_specific_fake_lsp_adapter(
4575 "tsx",
4576 false,
4577 FakeLspAdapter {
4578 name: &language_server_names[2],
4579 capabilities: lsp::ServerCapabilities {
4580 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4581 ..lsp::ServerCapabilities::default()
4582 },
4583 ..FakeLspAdapter::default()
4584 },
4585 );
4586 let _c = language_registry.register_specific_fake_lsp_adapter(
4587 "tsx",
4588 false,
4589 FakeLspAdapter {
4590 name: &language_server_names[3],
4591 capabilities: lsp::ServerCapabilities {
4592 hover_provider: None,
4593 ..lsp::ServerCapabilities::default()
4594 },
4595 ..FakeLspAdapter::default()
4596 },
4597 );
4598
4599 let buffer = project
4600 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4601 .await
4602 .unwrap();
4603 cx.executor().run_until_parked();
4604
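    // Install a handler on each fake server according to its declared capabilities:
    // the hover-capable servers respond (with content or None), while the server
    // without hover capabilities must never receive a hover request.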
4605 let mut servers_with_hover_requests = HashMap::default();
4606 for i in 0..language_server_names.len() {
4607 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4608 panic!(
4609 "Failed to get language server #{i} with name {}",
4610 &language_server_names[i]
4611 )
4612 });
4613 let new_server_name = new_server.server.name();
4614 assert!(
4615 !servers_with_hover_requests.contains_key(new_server_name),
4616 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4617 );
4618 let new_server_name = new_server_name.to_string();
4619 match new_server_name.as_str() {
4620 "TailwindServer" | "TypeScriptServer" => {
4621 servers_with_hover_requests.insert(
4622 new_server_name.clone(),
4623 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
4624 let name = new_server_name.clone();
4625 async move {
4626 Ok(Some(lsp::Hover {
4627 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
4628 format!("{name} hover"),
4629 )),
4630 range: None,
4631 }))
4632 }
4633 }),
4634 );
4635 }
4636 "ESLintServer" => {
4637 servers_with_hover_requests.insert(
4638 new_server_name,
4639 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4640 |_, _| async move { Ok(None) },
4641 ),
4642 );
4643 }
4644 "NoHoverCapabilitiesServer" => {
4645 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
4646 |_, _| async move {
4647 panic!(
                            "Should not request hovers from a server without hover capabilities"
4649 )
4650 },
4651 );
4652 }
4653 unexpected => panic!("Unexpected server name: {unexpected}"),
4654 }
4655 }
4656
4657 let hover_task = project.update(cx, |project, cx| {
4658 project.hover(&buffer, Point::new(0, 0), cx)
4659 });
4660 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
4661 |mut hover_request| async move {
4662 hover_request
4663 .next()
4664 .await
4665 .expect("All hover requests should have been triggered")
4666 },
4667 ))
4668 .await;
4669 assert_eq!(
4670 vec!["TailwindServer hover", "TypeScriptServer hover"],
4671 hover_task
4672 .await
4673 .into_iter()
4674 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4675 .sorted()
4676 .collect::<Vec<_>>(),
4677 "Should receive hover responses from all related servers with hover capabilities"
4678 );
4679}
4680
4681#[gpui::test]
4682async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4683 init_test(cx);
4684
4685 let fs = FakeFs::new(cx.executor());
4686 fs.insert_tree(
4687 "/dir",
4688 json!({
4689 "a.ts": "a",
4690 }),
4691 )
4692 .await;
4693
4694 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4695
4696 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4697 language_registry.add(typescript_lang());
4698 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4699 "TypeScript",
4700 FakeLspAdapter {
4701 capabilities: lsp::ServerCapabilities {
4702 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4703 ..lsp::ServerCapabilities::default()
4704 },
4705 ..FakeLspAdapter::default()
4706 },
4707 );
4708
4709 let buffer = project
4710 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4711 .await
4712 .unwrap();
4713 cx.executor().run_until_parked();
4714
4715 let fake_server = fake_language_servers
4716 .next()
4717 .await
4718 .expect("failed to get the language server");
4719
4720 let mut request_handled =
4721 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4722 Ok(Some(lsp::Hover {
4723 contents: lsp::HoverContents::Array(vec![
4724 lsp::MarkedString::String("".to_string()),
4725 lsp::MarkedString::String(" ".to_string()),
4726 lsp::MarkedString::String("\n\n\n".to_string()),
4727 ]),
4728 range: None,
4729 }))
4730 });
4731
4732 let hover_task = project.update(cx, |project, cx| {
4733 project.hover(&buffer, Point::new(0, 0), cx)
4734 });
4735 let () = request_handled
4736 .next()
4737 .await
4738 .expect("All hover requests should have been triggered");
4739 assert_eq!(
4740 Vec::<String>::new(),
4741 hover_task
4742 .await
4743 .into_iter()
4744 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4745 .sorted()
4746 .collect::<Vec<_>>(),
4747 "Empty hover parts should be ignored"
4748 );
4749}
4750
4751#[gpui::test]
4752async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4753 init_test(cx);
4754
4755 let fs = FakeFs::new(cx.executor());
4756 fs.insert_tree(
4757 "/dir",
4758 json!({
4759 "a.tsx": "a",
4760 }),
4761 )
4762 .await;
4763
4764 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4765
4766 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4767 language_registry.add(tsx_lang());
4768 let language_server_names = [
4769 "TypeScriptServer",
4770 "TailwindServer",
4771 "ESLintServer",
4772 "NoActionsCapabilitiesServer",
4773 ];
4774 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4775 "tsx",
4776 true,
4777 FakeLspAdapter {
4778 name: &language_server_names[0],
4779 capabilities: lsp::ServerCapabilities {
4780 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4781 ..lsp::ServerCapabilities::default()
4782 },
4783 ..FakeLspAdapter::default()
4784 },
4785 );
4786 let _a = language_registry.register_specific_fake_lsp_adapter(
4787 "tsx",
4788 false,
4789 FakeLspAdapter {
4790 name: &language_server_names[1],
4791 capabilities: lsp::ServerCapabilities {
4792 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4793 ..lsp::ServerCapabilities::default()
4794 },
4795 ..FakeLspAdapter::default()
4796 },
4797 );
4798 let _b = language_registry.register_specific_fake_lsp_adapter(
4799 "tsx",
4800 false,
4801 FakeLspAdapter {
4802 name: &language_server_names[2],
4803 capabilities: lsp::ServerCapabilities {
4804 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4805 ..lsp::ServerCapabilities::default()
4806 },
4807 ..FakeLspAdapter::default()
4808 },
4809 );
4810 let _c = language_registry.register_specific_fake_lsp_adapter(
4811 "tsx",
4812 false,
4813 FakeLspAdapter {
4814 name: &language_server_names[3],
4815 capabilities: lsp::ServerCapabilities {
4816 code_action_provider: None,
4817 ..lsp::ServerCapabilities::default()
4818 },
4819 ..FakeLspAdapter::default()
4820 },
4821 );
4822
4823 let buffer = project
4824 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4825 .await
4826 .unwrap();
4827 cx.executor().run_until_parked();
4828
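    // As in the hover test above, handlers are installed per server: the code-action-capable
    // servers respond (with an action or None), and the server without that capability must
    // never be queried.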
4829 let mut servers_with_actions_requests = HashMap::default();
4830 for i in 0..language_server_names.len() {
4831 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4832 panic!(
4833 "Failed to get language server #{i} with name {}",
4834 &language_server_names[i]
4835 )
4836 });
4837 let new_server_name = new_server.server.name();
4838 assert!(
4839 !servers_with_actions_requests.contains_key(new_server_name),
4840 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4841 );
4842 let new_server_name = new_server_name.to_string();
4843 match new_server_name.as_str() {
4844 "TailwindServer" | "TypeScriptServer" => {
4845 servers_with_actions_requests.insert(
4846 new_server_name.clone(),
4847 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4848 move |_, _| {
4849 let name = new_server_name.clone();
4850 async move {
4851 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4852 lsp::CodeAction {
4853 title: format!("{name} code action"),
4854 ..lsp::CodeAction::default()
4855 },
4856 )]))
4857 }
4858 },
4859 ),
4860 );
4861 }
4862 "ESLintServer" => {
4863 servers_with_actions_requests.insert(
4864 new_server_name,
4865 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4866 |_, _| async move { Ok(None) },
4867 ),
4868 );
4869 }
4870 "NoActionsCapabilitiesServer" => {
4871 let _never_handled = new_server
4872 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4873 panic!(
                            "Should not request code actions from a server without code action capabilities"
4875 )
4876 });
4877 }
4878 unexpected => panic!("Unexpected server name: {unexpected}"),
4879 }
4880 }
4881
4882 let code_actions_task = project.update(cx, |project, cx| {
4883 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4884 });
4885 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4886 |mut code_actions_request| async move {
4887 code_actions_request
4888 .next()
4889 .await
4890 .expect("All code actions requests should have been triggered")
4891 },
4892 ))
4893 .await;
4894 assert_eq!(
4895 vec!["TailwindServer code action", "TypeScriptServer code action"],
4896 code_actions_task
4897 .await
4898 .into_iter()
4899 .map(|code_action| code_action.lsp_action.title)
4900 .sorted()
4901 .collect::<Vec<_>>(),
        "Should receive code action responses from all related servers with code action capabilities"
4903 );
4904}
4905
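// Runs `query` against `project` and collects the streamed search results into a map
// from each matching buffer's full path to the byte ranges of its matches.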
4906async fn search(
4907 project: &Model<Project>,
4908 query: SearchQuery,
4909 cx: &mut gpui::TestAppContext,
4910) -> Result<HashMap<String, Vec<Range<usize>>>> {
4911 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
4912 let mut results = HashMap::default();
4913 while let Some(search_result) = search_rx.next().await {
4914 match search_result {
4915 SearchResult::Buffer { buffer, ranges } => {
4916 results.entry(buffer).or_insert(ranges);
4917 }
4918 SearchResult::LimitReached => {}
4919 }
4920 }
4921 Ok(results
4922 .into_iter()
4923 .map(|(buffer, ranges)| {
4924 buffer.update(cx, |buffer, cx| {
4925 let path = buffer
4926 .file()
4927 .unwrap()
4928 .full_path(cx)
4929 .to_string_lossy()
4930 .to_string();
4931 let ranges = ranges
4932 .into_iter()
4933 .map(|range| range.to_offset(buffer))
4934 .collect::<Vec<_>>();
4935 (path, ranges)
4936 })
4937 })
4938 .collect())
4939}
4940
4941fn init_test(cx: &mut gpui::TestAppContext) {
4942 if std::env::var("RUST_LOG").is_ok() {
4943 env_logger::try_init().ok();
4944 }
4945
4946 cx.update(|cx| {
4947 let settings_store = SettingsStore::test(cx);
4948 cx.set_global(settings_store);
4949 release_channel::init("0.0.0", cx);
4950 language::init(cx);
4951 Project::init_settings(cx);
4952 });
4953}
4954
4955fn json_lang() -> Arc<Language> {
4956 Arc::new(Language::new(
4957 LanguageConfig {
4958 name: "JSON".into(),
4959 matcher: LanguageMatcher {
4960 path_suffixes: vec!["json".to_string()],
4961 ..Default::default()
4962 },
4963 ..Default::default()
4964 },
4965 None,
4966 ))
4967}
4968
4969fn js_lang() -> Arc<Language> {
4970 Arc::new(Language::new(
4971 LanguageConfig {
4972 name: Arc::from("JavaScript"),
4973 matcher: LanguageMatcher {
4974 path_suffixes: vec!["js".to_string()],
4975 ..Default::default()
4976 },
4977 ..Default::default()
4978 },
4979 None,
4980 ))
4981}
4982
4983fn rust_lang() -> Arc<Language> {
4984 Arc::new(Language::new(
4985 LanguageConfig {
4986 name: "Rust".into(),
4987 matcher: LanguageMatcher {
4988 path_suffixes: vec!["rs".to_string()],
4989 ..Default::default()
4990 },
4991 ..Default::default()
4992 },
4993 Some(tree_sitter_rust::language()),
4994 ))
4995}
4996
4997fn typescript_lang() -> Arc<Language> {
4998 Arc::new(Language::new(
4999 LanguageConfig {
5000 name: "TypeScript".into(),
5001 matcher: LanguageMatcher {
5002 path_suffixes: vec!["ts".to_string()],
5003 ..Default::default()
5004 },
5005 ..Default::default()
5006 },
5007 Some(tree_sitter_typescript::language_typescript()),
5008 ))
5009}
5010
5011fn tsx_lang() -> Arc<Language> {
5012 Arc::new(Language::new(
5013 LanguageConfig {
5014 name: "tsx".into(),
5015 matcher: LanguageMatcher {
5016 path_suffixes: vec!["tsx".to_string()],
5017 ..Default::default()
5018 },
5019 ..Default::default()
5020 },
5021 Some(tree_sitter_typescript::language_tsx()),
5022 ))
5023}