1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use parking_lot::Mutex;
11use pretty_assertions::assert_eq;
12use serde_json::json;
13#[cfg(not(windows))]
14use std::os;
15use std::task::Poll;
16use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
17use unindent::Unindent as _;
18use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
19use worktree::WorktreeModelHandle as _;
20
21#[gpui::test]
22async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
23 cx.executor().allow_parking();
24
25 let (tx, mut rx) = futures::channel::mpsc::unbounded();
26 let _thread = std::thread::spawn(move || {
27 std::fs::metadata("/Users").unwrap();
28 std::thread::sleep(Duration::from_millis(1000));
29 tx.unbounded_send(1).unwrap();
30 });
31 rx.next().await.unwrap();
32}
33
34#[gpui::test]
35async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let io_task = smol::unblock(move || {
39 println!("sleeping on thread {:?}", std::thread::current().id());
40 std::thread::sleep(Duration::from_millis(10));
41 1
42 });
43
44 let task = cx.foreground_executor().spawn(async move {
45 io_task.await;
46 });
47
48 task.await;
49}
50
51#[cfg(not(windows))]
52#[gpui::test]
53async fn test_symlinks(cx: &mut gpui::TestAppContext) {
54 init_test(cx);
55 cx.executor().allow_parking();
56
57 let dir = temp_tree(json!({
58 "root": {
59 "apple": "",
60 "banana": {
61 "carrot": {
62 "date": "",
63 "endive": "",
64 }
65 },
66 "fennel": {
67 "grape": "",
68 }
69 }
70 }));
71
72 let root_link_path = dir.path().join("root_link");
73 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
74 os::unix::fs::symlink(
75 &dir.path().join("root/fennel"),
76 &dir.path().join("root/finnochio"),
77 )
78 .unwrap();
79
80 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
81
82 project.update(cx, |project, cx| {
83 let tree = project.worktrees().next().unwrap().read(cx);
84 assert_eq!(tree.file_count(), 5);
85 assert_eq!(
86 tree.inode_for_path("fennel/grape"),
87 tree.inode_for_path("finnochio/grape")
88 );
89 });
90}
91
// Verifies per-directory `.zed/settings.json` and `.zed/tasks.json` handling:
// the worktree root and a nested `b/` directory each contribute their own
// settings (tab size) and task definitions, and a worktree-level static task
// source can be replaced at runtime via a tracked file.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let task_context = TaskContext::default();

    // Let the settings/tasks files be discovered and loaded.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees().next().unwrap().read(cx).id()
        })
    });
    // The source kind for tasks declared at the worktree root.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings are resolved per-file: `a/a.rs` falls under the root
            // `.zed` settings, `b/b.rs` under the nested `b/.zed` settings.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks.json files contribute a "cargo check" task, with different args.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as most recently scheduled.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Replace the root static task source with a tracked-file source fed
    // over a channel, carrying a new template with extra args and env.
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The root task now reflects the replaced source; the nested `b/` task
    // is unchanged.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
296
// Exercises the full language-server lifecycle in a project: lazy server
// startup on buffer open, capability-based buffer configuration, routing of
// change/save/rename/close notifications to the server matching each
// buffer's language, and server restarts reopening the relevant documents.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake servers for Rust and JSON with distinct completion
    // trigger characters, so assertions below can tell which server
    // configured a given buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/test.rs")
                .unwrap()
                .into(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test.rs")
                .unwrap()
                .into(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/package.json")
                .unwrap()
                .into(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test2.rs")
                .unwrap()
                .into(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/Cargo.toml")
                .unwrap()
                .into()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/Cargo.toml")
                .unwrap()
                .into()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    // A same-extension rename is seen by the server as close + reopen.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test2.rs")
                .unwrap()
                .into()
        ),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/test3.rs")
                .unwrap()
                .into(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can check it is cleared on language change below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test3.rs")
                .unwrap()
                .into(),
        ),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/test3.json")
                .unwrap()
                .into(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test3.json")
                .unwrap()
                .into(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers should receive a shutdown request.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/test.rs")
                .unwrap()
                .into(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of reopening is unspecified, hence the set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path("/the-root/package.json")
                    .unwrap()
                    .into(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path("/the-root/test3.json")
                    .unwrap()
                    .into(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/package.json")
                .unwrap()
                .into(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
705
// Verifies the `workspace/didChangeWatchedFiles` flow: gitignored
// directories are only scanned once a language server registers a watcher
// covering them, and only FS events matching the registered glob patterns
// are forwarded to the server.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by the watcher
    // registration below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so the assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering watchers alone should not produce any change events.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path("/the-root/src/b.rs")
                    .unwrap()
                    .into(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path("/the-root/src/c.rs")
                    .unwrap()
                    .into(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path("/the-root/target/y/out/y2.rs")
                    .unwrap()
                    .into(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
905
906#[gpui::test]
907async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
908 init_test(cx);
909
910 let fs = FakeFs::new(cx.executor());
911 fs.insert_tree(
912 "/dir",
913 json!({
914 "a.rs": "let a = 1;",
915 "b.rs": "let b = 2;"
916 }),
917 )
918 .await;
919
920 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
921
922 let buffer_a = project
923 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
924 .await
925 .unwrap();
926 let buffer_b = project
927 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
928 .await
929 .unwrap();
930
931 project.update(cx, |project, cx| {
932 project
933 .update_diagnostics(
934 LanguageServerId(0),
935 lsp::PublishDiagnosticsParams {
936 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
937 version: None,
938 diagnostics: vec![lsp::Diagnostic {
939 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
940 severity: Some(lsp::DiagnosticSeverity::ERROR),
941 message: "error 1".to_string(),
942 ..Default::default()
943 }],
944 },
945 &[],
946 cx,
947 )
948 .unwrap();
949 project
950 .update_diagnostics(
951 LanguageServerId(0),
952 lsp::PublishDiagnosticsParams {
953 uri: Uri::from_file_path("/dir/b.rs").unwrap().into(),
954 version: None,
955 diagnostics: vec![lsp::Diagnostic {
956 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
957 severity: Some(lsp::DiagnosticSeverity::WARNING),
958 message: "error 2".to_string(),
959 ..Default::default()
960 }],
961 },
962 &[],
963 cx,
964 )
965 .unwrap();
966 });
967
968 buffer_a.update(cx, |buffer, _| {
969 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
970 assert_eq!(
971 chunks
972 .iter()
973 .map(|(s, d)| (s.as_str(), *d))
974 .collect::<Vec<_>>(),
975 &[
976 ("let ", None),
977 ("a", Some(DiagnosticSeverity::ERROR)),
978 (" = 1;", None),
979 ]
980 );
981 });
982 buffer_b.update(cx, |buffer, _| {
983 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
984 assert_eq!(
985 chunks
986 .iter()
987 .map(|(s, d)| (s.as_str(), *d))
988 .collect::<Vec<_>>(),
989 &[
990 ("let ", None),
991 ("b", Some(DiagnosticSeverity::WARNING)),
992 (" = 2;", None),
993 ]
994 );
995 });
996}
997
998#[gpui::test]
999async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1000 init_test(cx);
1001
1002 let fs = FakeFs::new(cx.executor());
1003 fs.insert_tree(
1004 "/root",
1005 json!({
1006 "dir": {
1007 ".git": {
1008 "HEAD": "ref: refs/heads/main",
1009 },
1010 ".gitignore": "b.rs",
1011 "a.rs": "let a = 1;",
1012 "b.rs": "let b = 2;",
1013 },
1014 "other.rs": "let b = c;"
1015 }),
1016 )
1017 .await;
1018
1019 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1020 let (worktree, _) = project
1021 .update(cx, |project, cx| {
1022 project.find_or_create_local_worktree("/root/dir", true, cx)
1023 })
1024 .await
1025 .unwrap();
1026 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1027
1028 let (worktree, _) = project
1029 .update(cx, |project, cx| {
1030 project.find_or_create_local_worktree("/root/other.rs", false, cx)
1031 })
1032 .await
1033 .unwrap();
1034 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1035
1036 let server_id = LanguageServerId(0);
1037 project.update(cx, |project, cx| {
1038 project
1039 .update_diagnostics(
1040 server_id,
1041 lsp::PublishDiagnosticsParams {
1042 uri: Uri::from_file_path("/root/dir/b.rs").unwrap().into(),
1043 version: None,
1044 diagnostics: vec![lsp::Diagnostic {
1045 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1046 severity: Some(lsp::DiagnosticSeverity::ERROR),
1047 message: "unused variable 'b'".to_string(),
1048 ..Default::default()
1049 }],
1050 },
1051 &[],
1052 cx,
1053 )
1054 .unwrap();
1055 project
1056 .update_diagnostics(
1057 server_id,
1058 lsp::PublishDiagnosticsParams {
1059 uri: Uri::from_file_path("/root/other.rs").unwrap().into(),
1060 version: None,
1061 diagnostics: vec![lsp::Diagnostic {
1062 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1063 severity: Some(lsp::DiagnosticSeverity::ERROR),
1064 message: "unknown variable 'c'".to_string(),
1065 ..Default::default()
1066 }],
1067 },
1068 &[],
1069 cx,
1070 )
1071 .unwrap();
1072 });
1073
1074 let main_ignored_buffer = project
1075 .update(cx, |project, cx| {
1076 project.open_buffer((main_worktree_id, "b.rs"), cx)
1077 })
1078 .await
1079 .unwrap();
1080 main_ignored_buffer.update(cx, |buffer, _| {
1081 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1082 assert_eq!(
1083 chunks
1084 .iter()
1085 .map(|(s, d)| (s.as_str(), *d))
1086 .collect::<Vec<_>>(),
1087 &[
1088 ("let ", None),
1089 ("b", Some(DiagnosticSeverity::ERROR)),
1090 (" = 2;", None),
1091 ],
1092 "Gigitnored buffers should still get in-buffer diagnostics",
1093 );
1094 });
1095 let other_buffer = project
1096 .update(cx, |project, cx| {
1097 project.open_buffer((other_worktree_id, ""), cx)
1098 })
1099 .await
1100 .unwrap();
1101 other_buffer.update(cx, |buffer, _| {
1102 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1103 assert_eq!(
1104 chunks
1105 .iter()
1106 .map(|(s, d)| (s.as_str(), *d))
1107 .collect::<Vec<_>>(),
1108 &[
1109 ("let b = ", None),
1110 ("c", Some(DiagnosticSeverity::ERROR)),
1111 (";", None),
1112 ],
1113 "Buffers from hidden projects should still get in-buffer diagnostics"
1114 );
1115 });
1116
1117 project.update(cx, |project, cx| {
1118 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1119 assert_eq!(
1120 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1121 vec![(
1122 ProjectPath {
1123 worktree_id: main_worktree_id,
1124 path: Arc::from(Path::new("b.rs")),
1125 },
1126 server_id,
1127 DiagnosticSummary {
1128 error_count: 1,
1129 warning_count: 0,
1130 }
1131 )]
1132 );
1133 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1134 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1135 });
1136}
1137
1138#[gpui::test]
1139async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
1140 init_test(cx);
1141
1142 let progress_token = "the-progress-token";
1143
1144 let fs = FakeFs::new(cx.executor());
1145 fs.insert_tree(
1146 "/dir",
1147 json!({
1148 "a.rs": "fn a() { A }",
1149 "b.rs": "const y: i32 = 1",
1150 }),
1151 )
1152 .await;
1153
1154 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1155 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1156
1157 language_registry.add(rust_lang());
1158 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1159 "Rust",
1160 FakeLspAdapter {
1161 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1162 disk_based_diagnostics_sources: vec!["disk".into()],
1163 ..Default::default()
1164 },
1165 );
1166
1167 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1168
1169 // Cause worktree to start the fake language server
1170 let _buffer = project
1171 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1172 .await
1173 .unwrap();
1174
1175 let mut events = cx.events(&project);
1176
1177 let fake_server = fake_servers.next().await.unwrap();
1178 assert_eq!(
1179 events.next().await.unwrap(),
1180 Event::LanguageServerAdded(LanguageServerId(0)),
1181 );
1182
1183 fake_server
1184 .start_progress(format!("{}/0", progress_token))
1185 .await;
1186 assert_eq!(
1187 events.next().await.unwrap(),
1188 Event::DiskBasedDiagnosticsStarted {
1189 language_server_id: LanguageServerId(0),
1190 }
1191 );
1192
1193 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1194 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
1195 version: None,
1196 diagnostics: vec![lsp::Diagnostic {
1197 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1198 severity: Some(lsp::DiagnosticSeverity::ERROR),
1199 message: "undefined variable 'A'".to_string(),
1200 ..Default::default()
1201 }],
1202 });
1203 assert_eq!(
1204 events.next().await.unwrap(),
1205 Event::DiagnosticsUpdated {
1206 language_server_id: LanguageServerId(0),
1207 path: (worktree_id, Path::new("a.rs")).into()
1208 }
1209 );
1210
1211 fake_server.end_progress(format!("{}/0", progress_token));
1212 assert_eq!(
1213 events.next().await.unwrap(),
1214 Event::DiskBasedDiagnosticsFinished {
1215 language_server_id: LanguageServerId(0)
1216 }
1217 );
1218
1219 let buffer = project
1220 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1221 .await
1222 .unwrap();
1223
1224 buffer.update(cx, |buffer, _| {
1225 let snapshot = buffer.snapshot();
1226 let diagnostics = snapshot
1227 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1228 .collect::<Vec<_>>();
1229 assert_eq!(
1230 diagnostics,
1231 &[DiagnosticEntry {
1232 range: Point::new(0, 9)..Point::new(0, 10),
1233 diagnostic: Diagnostic {
1234 severity: lsp::DiagnosticSeverity::ERROR,
1235 message: "undefined variable 'A'".to_string(),
1236 group_id: 0,
1237 is_primary: true,
1238 ..Default::default()
1239 }
1240 }]
1241 )
1242 });
1243
1244 // Ensure publishing empty diagnostics twice only results in one update event.
1245 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1246 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
1247 version: None,
1248 diagnostics: Default::default(),
1249 });
1250 assert_eq!(
1251 events.next().await.unwrap(),
1252 Event::DiagnosticsUpdated {
1253 language_server_id: LanguageServerId(0),
1254 path: (worktree_id, Path::new("a.rs")).into()
1255 }
1256 );
1257
1258 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1259 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
1260 version: None,
1261 diagnostics: Default::default(),
1262 });
1263 cx.executor().run_until_parked();
1264 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1265}
1266
// Restarting a language server while its disk-based diagnostics are still in
// flight must not leave the project stuck in a "diagnostics running" state:
// the old server's never-ended progress token is abandoned, and only the new
// server (id 1) is tracked and later marked finished.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    // Opening the buffer starts the first server instance.
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the replacement server (id 1) should be reported as running
    // disk-based diagnostics; the old server's token is gone.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1345
// Restarting a language server must clear the diagnostics it had published:
// both the in-buffer diagnostic entries and the project-level summary counts
// go back to zero after the restart.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Wait for the publish notification to be processed, then confirm the
    // diagnostic landed in both the buffer and the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1425
1426#[gpui::test]
1427async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1428 init_test(cx);
1429
1430 let fs = FakeFs::new(cx.executor());
1431 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1432
1433 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1434 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1435
1436 language_registry.add(rust_lang());
1437 let mut fake_servers =
1438 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1439
1440 let buffer = project
1441 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1442 .await
1443 .unwrap();
1444
1445 // Before restarting the server, report diagnostics with an unknown buffer version.
1446 let fake_server = fake_servers.next().await.unwrap();
1447 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1448 uri: lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
1449 version: Some(10000),
1450 diagnostics: Vec::new(),
1451 });
1452 cx.executor().run_until_parked();
1453
1454 project.update(cx, |project, cx| {
1455 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1456 });
1457 let mut fake_server = fake_servers.next().await.unwrap();
1458 let notification = fake_server
1459 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1460 .await
1461 .text_document;
1462 assert_eq!(notification.version, 0);
1463}
1464
// Toggling the per-language `enable_language_server` setting must start and
// stop exactly the affected servers: disabling Rust stops only the Rust
// server; re-enabling Rust while disabling JavaScript starts a fresh Rust
// server (which re-opens the buffer) and shuts the JavaScript server down.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1578
// Diagnostics published against an older LSP document version must be
// translated through the buffer edits made since that version. Covers:
// diagnostics moving with an edit above them, overlapping diagnostics of
// different severities, and diagnostics arriving out of positional order
// after further edits.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows shift by 2 because of the "\n\n" inserted at the top.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the narrower error at the same start.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error wins the highlight.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1858
1859#[gpui::test]
1860async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1861 init_test(cx);
1862
1863 let text = concat!(
1864 "let one = ;\n", //
1865 "let two = \n",
1866 "let three = 3;\n",
1867 );
1868
1869 let fs = FakeFs::new(cx.executor());
1870 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1871
1872 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1873 let buffer = project
1874 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1875 .await
1876 .unwrap();
1877
1878 project.update(cx, |project, cx| {
1879 project
1880 .update_buffer_diagnostics(
1881 &buffer,
1882 LanguageServerId(0),
1883 None,
1884 vec![
1885 DiagnosticEntry {
1886 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1887 diagnostic: Diagnostic {
1888 severity: DiagnosticSeverity::ERROR,
1889 message: "syntax error 1".to_string(),
1890 ..Default::default()
1891 },
1892 },
1893 DiagnosticEntry {
1894 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1895 diagnostic: Diagnostic {
1896 severity: DiagnosticSeverity::ERROR,
1897 message: "syntax error 2".to_string(),
1898 ..Default::default()
1899 },
1900 },
1901 ],
1902 cx,
1903 )
1904 .unwrap();
1905 });
1906
1907 // An empty range is extended forward to include the following character.
1908 // At the end of a line, an empty range is extended backward to include
1909 // the preceding character.
1910 buffer.update(cx, |buffer, _| {
1911 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1912 assert_eq!(
1913 chunks
1914 .iter()
1915 .map(|(s, d)| (s.as_str(), *d))
1916 .collect::<Vec<_>>(),
1917 &[
1918 ("let one = ", None),
1919 (";", Some(DiagnosticSeverity::ERROR)),
1920 ("\nlet two =", None),
1921 (" ", Some(DiagnosticSeverity::ERROR)),
1922 ("\nlet three = 3;\n", None)
1923 ]
1924 );
1925 });
1926}
1927
1928#[gpui::test]
1929async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1930 init_test(cx);
1931
1932 let fs = FakeFs::new(cx.executor());
1933 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1934 .await;
1935
1936 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1937
1938 project.update(cx, |project, cx| {
1939 project
1940 .update_diagnostic_entries(
1941 LanguageServerId(0),
1942 Path::new("/dir/a.rs").to_owned(),
1943 None,
1944 vec![DiagnosticEntry {
1945 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1946 diagnostic: Diagnostic {
1947 severity: DiagnosticSeverity::ERROR,
1948 is_primary: true,
1949 message: "syntax error a1".to_string(),
1950 ..Default::default()
1951 },
1952 }],
1953 cx,
1954 )
1955 .unwrap();
1956 project
1957 .update_diagnostic_entries(
1958 LanguageServerId(1),
1959 Path::new("/dir/a.rs").to_owned(),
1960 None,
1961 vec![DiagnosticEntry {
1962 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
1963 diagnostic: Diagnostic {
1964 severity: DiagnosticSeverity::ERROR,
1965 is_primary: true,
1966 message: "syntax error b1".to_string(),
1967 ..Default::default()
1968 },
1969 }],
1970 cx,
1971 )
1972 .unwrap();
1973
1974 assert_eq!(
1975 project.diagnostic_summary(false, cx),
1976 DiagnosticSummary {
1977 error_count: 2,
1978 warning_count: 0,
1979 }
1980 );
1981 });
1982}
1983
// `edits_from_lsp` with an older document version must interpret the server's
// edit ranges against that old text and map them forward through the buffer
// edits made since. Also checks that adjacent insert+delete edits at the same
// position are combined sensibly (f3 -> f4000).
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the version the server saw at open time; the LSP edits below
    // will be issued against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // These edit coordinates refer to the *original* text, via the stale
    // document version passed below.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the interleaved comments
    // while still landing the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2136
// A large diff that really only makes a small change (rust-analyzer's
// merge-imports pattern: rewrite one use statement, re-insert the rest of the
// file, delete the original remainder) must be minimized by `edits_from_lsp`
// into just the two edits that actually change text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The giant insert+delete pair collapses to two minimal edits:
        // rewrite the first use path and remove the now-duplicate line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2244
// Like the adjacent-lines test, but the server sends the edits out of order,
// with one inverted range (end before start) and one range pointing past the
// end of the file. `edits_from_lsp` must tolerate all of that and still
// minimize to the same two clean edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (99, 0) is past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal result as the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2348
2349fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2350 buffer: &Buffer,
2351 range: Range<T>,
2352) -> Vec<(String, Option<DiagnosticSeverity>)> {
2353 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2354 for chunk in buffer.snapshot().chunks(range, true) {
2355 if chunks.last().map_or(false, |prev_chunk| {
2356 prev_chunk.1 == chunk.diagnostic_severity
2357 }) {
2358 chunks.last_mut().unwrap().0.push_str(chunk.text);
2359 } else {
2360 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2361 }
2362 }
2363 chunks
2364}
2365
// End-to-end go-to-definition test: the fake language server returns a
// location in a file outside the project's visible worktree. That file should
// be opened via a new, *invisible* worktree which is released once the
// definition's buffer handle is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` exists on disk but is not open.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Respond to the definition request with a location inside `a.rs`,
    // a file outside the visible worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            Uri::from(params.text_document.uri).to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition (and its buffer handle) releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: lists each worktree's absolute path together with its visibility.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2460
// Completion items that carry no explicit text-edit range must fall back to
// replacing the word fragment preceding the cursor, and `insert_text` (when
// present) takes precedence over `label` for the inserted text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: cursor at the end of a partial word ("fqn") — the completion
    // should replace that word fragment.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler that
    // will serve it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` wins over the (decorated) label, and the replaced range
    // covers the "fqn" fragment before the cursor.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: cursor inside a string literal — the replaced range covers the
    // partial word ("cmp") before the cursor, not the whole string.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2552
// Carriage returns ("\r" and "\r\n") in a completion's `insert_text` must be
// normalized to plain "\n" before the completion is surfaced.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the completion request, then install the handler that serves it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed "\r" and "\r\n" line endings in the inserted text.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both CR variants were normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2613
// Exercises the code-action-with-command round trip: the server returns a
// lazily-resolved code action; resolving it yields a *command* rather than
// edits; executing the command makes the server send `workspace/applyEdit`
// back to the client, and those edits must appear in the project transaction
// returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Actions must be resolved via codeAction/resolve.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path("/dir/a.ts").unwrap().into(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // Undoing the transaction's edit restores the original contents.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2746
2747#[gpui::test(iterations = 10)]
2748async fn test_save_file(cx: &mut gpui::TestAppContext) {
2749 init_test(cx);
2750
2751 let fs = FakeFs::new(cx.executor());
2752 fs.insert_tree(
2753 "/dir",
2754 json!({
2755 "file1": "the old contents",
2756 }),
2757 )
2758 .await;
2759
2760 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2761 let buffer = project
2762 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2763 .await
2764 .unwrap();
2765 buffer.update(cx, |buffer, cx| {
2766 assert_eq!(buffer.text(), "the old contents");
2767 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2768 });
2769
2770 project
2771 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2772 .await
2773 .unwrap();
2774
2775 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2776 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2777}
2778
2779#[gpui::test(iterations = 30)]
2780async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2781 init_test(cx);
2782
2783 let fs = FakeFs::new(cx.executor().clone());
2784 fs.insert_tree(
2785 "/dir",
2786 json!({
2787 "file1": "the original contents",
2788 }),
2789 )
2790 .await;
2791
2792 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2793 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2794 let buffer = project
2795 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2796 .await
2797 .unwrap();
2798
2799 // Simulate buffer diffs being slow, so that they don't complete before
2800 // the next file change occurs.
2801 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2802
2803 // Change the buffer's file on disk, and then wait for the file change
2804 // to be detected by the worktree, so that the buffer starts reloading.
2805 fs.save(
2806 "/dir/file1".as_ref(),
2807 &"the first contents".into(),
2808 Default::default(),
2809 )
2810 .await
2811 .unwrap();
2812 worktree.next_event(cx).await;
2813
2814 // Change the buffer's file again. Depending on the random seed, the
2815 // previous file change may still be in progress.
2816 fs.save(
2817 "/dir/file1".as_ref(),
2818 &"the second contents".into(),
2819 Default::default(),
2820 )
2821 .await
2822 .unwrap();
2823 worktree.next_event(cx).await;
2824
2825 cx.executor().run_until_parked();
2826 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2827 buffer.read_with(cx, |buffer, _| {
2828 assert_eq!(buffer.text(), on_disk_text);
2829 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2830 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2831 });
2832}
2833
2834#[gpui::test(iterations = 30)]
2835async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2836 init_test(cx);
2837
2838 let fs = FakeFs::new(cx.executor().clone());
2839 fs.insert_tree(
2840 "/dir",
2841 json!({
2842 "file1": "the original contents",
2843 }),
2844 )
2845 .await;
2846
2847 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2848 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2849 let buffer = project
2850 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2851 .await
2852 .unwrap();
2853
2854 // Simulate buffer diffs being slow, so that they don't complete before
2855 // the next file change occurs.
2856 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2857
2858 // Change the buffer's file on disk, and then wait for the file change
2859 // to be detected by the worktree, so that the buffer starts reloading.
2860 fs.save(
2861 "/dir/file1".as_ref(),
2862 &"the first contents".into(),
2863 Default::default(),
2864 )
2865 .await
2866 .unwrap();
2867 worktree.next_event(cx).await;
2868
2869 cx.executor()
2870 .spawn(cx.executor().simulate_random_delay())
2871 .await;
2872
2873 // Perform a noop edit, causing the buffer's version to increase.
2874 buffer.update(cx, |buffer, cx| {
2875 buffer.edit([(0..0, " ")], None, cx);
2876 buffer.undo(cx);
2877 });
2878
2879 cx.executor().run_until_parked();
2880 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2881 buffer.read_with(cx, |buffer, _| {
2882 let buffer_text = buffer.text();
2883 if buffer_text == on_disk_text {
2884 assert!(
2885 !buffer.is_dirty() && !buffer.has_conflict(),
2886 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2887 );
2888 }
2889 // If the file change occurred while the buffer was processing the first
2890 // change, the buffer will be in a conflicting state.
2891 else {
2892 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2893 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2894 }
2895 });
2896}
2897
2898#[gpui::test]
2899async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2900 init_test(cx);
2901
2902 let fs = FakeFs::new(cx.executor());
2903 fs.insert_tree(
2904 "/dir",
2905 json!({
2906 "file1": "the old contents",
2907 }),
2908 )
2909 .await;
2910
2911 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2912 let buffer = project
2913 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2914 .await
2915 .unwrap();
2916 buffer.update(cx, |buffer, cx| {
2917 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2918 });
2919
2920 project
2921 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2922 .await
2923 .unwrap();
2924
2925 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2926 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2927}
2928
// Saving an untitled buffer to a path must: write its contents to disk, clear
// the dirty flag, re-detect the language from the new file name, and register
// the buffer under that path so re-opening it yields the same buffer.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts as Plain Text and becomes dirty once edited.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees().next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The ".rs" extension caused the language to be re-detected as Rust.
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Re-opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
2980
// After renaming and deleting files/directories on a real filesystem:
// worktree entry ids must be preserved for renamed entries, open buffers must
// track their files' new paths (or be marked deleted), and a remote replica
// fed the recorded worktree updates must converge to the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: opens a buffer for a path relative to the temp dir.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: looks up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree produces so it can be replayed
    // on the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            1,
            metadata,
            Box::new(CollabRemoteWorktreeClient(project.read(cx).client())),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Renamed entries keep their original entry ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; the deleted
        // file's buffer keeps its old path but is flagged as deleted.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3133
// Renaming a directory must preserve the worktree entry ids of the directory
// and the files inside it, and buffers open on those files must stay clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: looks up the worktree entry id for a path, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the open buffer remains clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3185
3186#[gpui::test]
3187async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3188 init_test(cx);
3189
3190 let fs = FakeFs::new(cx.executor());
3191 fs.insert_tree(
3192 "/dir",
3193 json!({
3194 "a.txt": "a-contents",
3195 "b.txt": "b-contents",
3196 }),
3197 )
3198 .await;
3199
3200 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3201
3202 // Spawn multiple tasks to open paths, repeating some paths.
3203 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3204 (
3205 p.open_local_buffer("/dir/a.txt", cx),
3206 p.open_local_buffer("/dir/b.txt", cx),
3207 p.open_local_buffer("/dir/a.txt", cx),
3208 )
3209 });
3210
3211 let buffer_a_1 = buffer_a_1.await.unwrap();
3212 let buffer_a_2 = buffer_a_2.await.unwrap();
3213 let buffer_b = buffer_b.await.unwrap();
3214 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3215 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3216
3217 // There is only one buffer per path.
3218 let buffer_a_id = buffer_a_1.entity_id();
3219 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3220
3221 // Open the same path again while it is still open.
3222 drop(buffer_a_1);
3223 let buffer_a_3 = project
3224 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3225 .await
3226 .unwrap();
3227
3228 // There's still only one buffer per path.
3229 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3230}
3231
// Verifies the buffer dirty-state machine and the exact event sequences it
// emits: dirty on first edit, clean after save, clean again when edits are
// manually reverted to the saved text, and dirty when the backing file is
// deleted (without a duplicate DirtyChanged if it was already dirty).
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Records every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        // Simulate a save by reporting the current version as saved.
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3372
// Verifies how a buffer reacts to its backing file changing on disk:
// an unmodified buffer is reloaded (with anchors preserved via a text diff),
// while a modified buffer keeps its edits and is marked as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the three initial lines, so we
    // can check below that anchors survive the reload-by-diff.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors track the text they were attached to: "aaa" moved to
        // row 1, "bbbbb" to row 3, and the anchor on the deleted "c" line
        // resolves to the asserted fallback position.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3453
3454#[gpui::test]
3455async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3456 init_test(cx);
3457
3458 let fs = FakeFs::new(cx.executor());
3459 fs.insert_tree(
3460 "/dir",
3461 json!({
3462 "file1": "a\nb\nc\n",
3463 "file2": "one\r\ntwo\r\nthree\r\n",
3464 }),
3465 )
3466 .await;
3467
3468 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3469 let buffer1 = project
3470 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3471 .await
3472 .unwrap();
3473 let buffer2 = project
3474 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3475 .await
3476 .unwrap();
3477
3478 buffer1.update(cx, |buffer, _| {
3479 assert_eq!(buffer.text(), "a\nb\nc\n");
3480 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3481 });
3482 buffer2.update(cx, |buffer, _| {
3483 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3484 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3485 });
3486
3487 // Change a file's line endings on disk from unix to windows. The buffer's
3488 // state updates correctly.
3489 fs.save(
3490 "/dir/file1".as_ref(),
3491 &"aaa\nb\nc\n".into(),
3492 LineEnding::Windows,
3493 )
3494 .await
3495 .unwrap();
3496 cx.executor().run_until_parked();
3497 buffer1.update(cx, |buffer, _| {
3498 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3499 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3500 });
3501
3502 // Save a file with windows line endings. The file is written correctly.
3503 buffer2.update(cx, |buffer, cx| {
3504 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3505 });
3506 project
3507 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3508 .await
3509 .unwrap();
3510 assert_eq!(
3511 fs.load("/dir/file2".as_ref()).await.unwrap(),
3512 "one\r\ntwo\r\nthree\r\nfour\r\n",
3513 );
3514}
3515
// Verifies that LSP diagnostics connected via `related_information` are
// merged into groups: each group has one primary entry plus its supporting
// hints, all sharing a `group_id`, and `diagnostic_group` returns a whole
// group sorted by range.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // The payload mimics rust-analyzer-style output: two logical diagnostics,
    // each published as a primary entry plus separate HINT entries that point
    // back at the primary via related_information ("original diagnostic").
    // - "error 1" (WARNING) with one hint at the same range.
    // - "error 2" (ERROR) with two hints at a different range.
    let buffer_uri = Uri::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone().into(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone().into(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone().into(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone().into(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone().into(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone().into(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.into(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the diagnostics and take a snapshot of the buffer to query.
    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries are reported, ordered by range. Hints carry the same
    // group_id as their primary: "error 2" and its hints are group 0,
    // "error 1" and its hint are group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints, sorted by range (hints first).
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3757
// Exercises the LSP rename flow end-to-end against a fake language server:
// `prepare_rename` resolves the renameable range, then `perform_rename`
// applies a multi-file WorkspaceEdit and returns a transaction covering
// every edited buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise rename with prepare support so prepare_rename is
                // actually sent to the server.
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Kick off prepare_rename at offset 7 (inside "ONE"), then let the fake
    // server answer it. The request future must be created before draining
    // the handler below.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    // The server's range covers the identifier "ONE" (offsets 6..9).
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server responds with a WorkspaceEdit that
    // touches both one.rs (the definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path("/dir/one.rs").unwrap().into(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path("/dir/two.rs").unwrap().into(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo data; verify both
    // buffers received the expected edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3890
3891#[gpui::test]
3892async fn test_search(cx: &mut gpui::TestAppContext) {
3893 init_test(cx);
3894
3895 let fs = FakeFs::new(cx.executor());
3896 fs.insert_tree(
3897 "/dir",
3898 json!({
3899 "one.rs": "const ONE: usize = 1;",
3900 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3901 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3902 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3903 }),
3904 )
3905 .await;
3906 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3907 assert_eq!(
3908 search(
3909 &project,
3910 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3911 cx
3912 )
3913 .await
3914 .unwrap(),
3915 HashMap::from_iter([
3916 ("dir/two.rs".to_string(), vec![6..9]),
3917 ("dir/three.rs".to_string(), vec![37..40])
3918 ])
3919 );
3920
3921 let buffer_4 = project
3922 .update(cx, |project, cx| {
3923 project.open_local_buffer("/dir/four.rs", cx)
3924 })
3925 .await
3926 .unwrap();
3927 buffer_4.update(cx, |buffer, cx| {
3928 let text = "two::TWO";
3929 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3930 });
3931
3932 assert_eq!(
3933 search(
3934 &project,
3935 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3936 cx
3937 )
3938 .await
3939 .unwrap(),
3940 HashMap::from_iter([
3941 ("dir/two.rs".to_string(), vec![6..9]),
3942 ("dir/three.rs".to_string(), vec![37..40]),
3943 ("dir/four.rs".to_string(), vec![25..28, 36..39])
3944 ])
3945 );
3946}
3947
3948#[gpui::test]
3949async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
3950 init_test(cx);
3951
3952 let search_query = "file";
3953
3954 let fs = FakeFs::new(cx.executor());
3955 fs.insert_tree(
3956 "/dir",
3957 json!({
3958 "one.rs": r#"// Rust file one"#,
3959 "one.ts": r#"// TypeScript file one"#,
3960 "two.rs": r#"// Rust file two"#,
3961 "two.ts": r#"// TypeScript file two"#,
3962 }),
3963 )
3964 .await;
3965 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3966
3967 assert!(
3968 search(
3969 &project,
3970 SearchQuery::text(
3971 search_query,
3972 false,
3973 true,
3974 false,
3975 vec![PathMatcher::new("*.odd").unwrap()],
3976 Vec::new()
3977 )
3978 .unwrap(),
3979 cx
3980 )
3981 .await
3982 .unwrap()
3983 .is_empty(),
3984 "If no inclusions match, no files should be returned"
3985 );
3986
3987 assert_eq!(
3988 search(
3989 &project,
3990 SearchQuery::text(
3991 search_query,
3992 false,
3993 true,
3994 false,
3995 vec![PathMatcher::new("*.rs").unwrap()],
3996 Vec::new()
3997 )
3998 .unwrap(),
3999 cx
4000 )
4001 .await
4002 .unwrap(),
4003 HashMap::from_iter([
4004 ("dir/one.rs".to_string(), vec![8..12]),
4005 ("dir/two.rs".to_string(), vec![8..12]),
4006 ]),
4007 "Rust only search should give only Rust files"
4008 );
4009
4010 assert_eq!(
4011 search(
4012 &project,
4013 SearchQuery::text(
4014 search_query,
4015 false,
4016 true,
4017 false,
4018 vec![
4019 PathMatcher::new("*.ts").unwrap(),
4020 PathMatcher::new("*.odd").unwrap(),
4021 ],
4022 Vec::new()
4023 ).unwrap(),
4024 cx
4025 )
4026 .await
4027 .unwrap(),
4028 HashMap::from_iter([
4029 ("dir/one.ts".to_string(), vec![14..18]),
4030 ("dir/two.ts".to_string(), vec![14..18]),
4031 ]),
4032 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4033 );
4034
4035 assert_eq!(
4036 search(
4037 &project,
4038 SearchQuery::text(
4039 search_query,
4040 false,
4041 true,
4042 false,
4043 vec![
4044 PathMatcher::new("*.rs").unwrap(),
4045 PathMatcher::new("*.ts").unwrap(),
4046 PathMatcher::new("*.odd").unwrap(),
4047 ],
4048 Vec::new()
4049 ).unwrap(),
4050 cx
4051 )
4052 .await
4053 .unwrap(),
4054 HashMap::from_iter([
4055 ("dir/two.ts".to_string(), vec![14..18]),
4056 ("dir/one.rs".to_string(), vec![8..12]),
4057 ("dir/one.ts".to_string(), vec![14..18]),
4058 ("dir/two.rs".to_string(), vec![8..12]),
4059 ]),
4060 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4061 );
4062}
4063
4064#[gpui::test]
4065async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4066 init_test(cx);
4067
4068 let search_query = "file";
4069
4070 let fs = FakeFs::new(cx.executor());
4071 fs.insert_tree(
4072 "/dir",
4073 json!({
4074 "one.rs": r#"// Rust file one"#,
4075 "one.ts": r#"// TypeScript file one"#,
4076 "two.rs": r#"// Rust file two"#,
4077 "two.ts": r#"// TypeScript file two"#,
4078 }),
4079 )
4080 .await;
4081 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4082
4083 assert_eq!(
4084 search(
4085 &project,
4086 SearchQuery::text(
4087 search_query,
4088 false,
4089 true,
4090 false,
4091 Vec::new(),
4092 vec![PathMatcher::new("*.odd").unwrap()],
4093 )
4094 .unwrap(),
4095 cx
4096 )
4097 .await
4098 .unwrap(),
4099 HashMap::from_iter([
4100 ("dir/one.rs".to_string(), vec![8..12]),
4101 ("dir/one.ts".to_string(), vec![14..18]),
4102 ("dir/two.rs".to_string(), vec![8..12]),
4103 ("dir/two.ts".to_string(), vec![14..18]),
4104 ]),
4105 "If no exclusions match, all files should be returned"
4106 );
4107
4108 assert_eq!(
4109 search(
4110 &project,
4111 SearchQuery::text(
4112 search_query,
4113 false,
4114 true,
4115 false,
4116 Vec::new(),
4117 vec![PathMatcher::new("*.rs").unwrap()],
4118 )
4119 .unwrap(),
4120 cx
4121 )
4122 .await
4123 .unwrap(),
4124 HashMap::from_iter([
4125 ("dir/one.ts".to_string(), vec![14..18]),
4126 ("dir/two.ts".to_string(), vec![14..18]),
4127 ]),
4128 "Rust exclusion search should give only TypeScript files"
4129 );
4130
4131 assert_eq!(
4132 search(
4133 &project,
4134 SearchQuery::text(
4135 search_query,
4136 false,
4137 true,
4138 false,
4139 Vec::new(),
4140 vec![
4141 PathMatcher::new("*.ts").unwrap(),
4142 PathMatcher::new("*.odd").unwrap(),
4143 ],
4144 ).unwrap(),
4145 cx
4146 )
4147 .await
4148 .unwrap(),
4149 HashMap::from_iter([
4150 ("dir/one.rs".to_string(), vec![8..12]),
4151 ("dir/two.rs".to_string(), vec![8..12]),
4152 ]),
4153 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4154 );
4155
4156 assert!(
4157 search(
4158 &project,
4159 SearchQuery::text(
4160 search_query,
4161 false,
4162 true,
4163 false,
4164 Vec::new(),
4165 vec![
4166 PathMatcher::new("*.rs").unwrap(),
4167 PathMatcher::new("*.ts").unwrap(),
4168 PathMatcher::new("*.odd").unwrap(),
4169 ],
4170 ).unwrap(),
4171 cx
4172 )
4173 .await
4174 .unwrap().is_empty(),
4175 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4176 );
4177}
4178
4179#[gpui::test]
4180async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4181 init_test(cx);
4182
4183 let search_query = "file";
4184
4185 let fs = FakeFs::new(cx.executor());
4186 fs.insert_tree(
4187 "/dir",
4188 json!({
4189 "one.rs": r#"// Rust file one"#,
4190 "one.ts": r#"// TypeScript file one"#,
4191 "two.rs": r#"// Rust file two"#,
4192 "two.ts": r#"// TypeScript file two"#,
4193 }),
4194 )
4195 .await;
4196 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4197
4198 assert!(
4199 search(
4200 &project,
4201 SearchQuery::text(
4202 search_query,
4203 false,
4204 true,
4205 false,
4206 vec![PathMatcher::new("*.odd").unwrap()],
4207 vec![PathMatcher::new("*.odd").unwrap()],
4208 )
4209 .unwrap(),
4210 cx
4211 )
4212 .await
4213 .unwrap()
4214 .is_empty(),
4215 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4216 );
4217
4218 assert!(
4219 search(
4220 &project,
4221 SearchQuery::text(
4222 search_query,
4223 false,
4224 true,
4225 false,
4226 vec![PathMatcher::new("*.ts").unwrap()],
4227 vec![PathMatcher::new("*.ts").unwrap()],
4228 ).unwrap(),
4229 cx
4230 )
4231 .await
4232 .unwrap()
4233 .is_empty(),
4234 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4235 );
4236
4237 assert!(
4238 search(
4239 &project,
4240 SearchQuery::text(
4241 search_query,
4242 false,
4243 true,
4244 false,
4245 vec![
4246 PathMatcher::new("*.ts").unwrap(),
4247 PathMatcher::new("*.odd").unwrap()
4248 ],
4249 vec![
4250 PathMatcher::new("*.ts").unwrap(),
4251 PathMatcher::new("*.odd").unwrap()
4252 ],
4253 )
4254 .unwrap(),
4255 cx
4256 )
4257 .await
4258 .unwrap()
4259 .is_empty(),
4260 "Non-matching inclusions and exclusions should not change that."
4261 );
4262
4263 assert_eq!(
4264 search(
4265 &project,
4266 SearchQuery::text(
4267 search_query,
4268 false,
4269 true,
4270 false,
4271 vec![
4272 PathMatcher::new("*.ts").unwrap(),
4273 PathMatcher::new("*.odd").unwrap()
4274 ],
4275 vec![
4276 PathMatcher::new("*.rs").unwrap(),
4277 PathMatcher::new("*.odd").unwrap()
4278 ],
4279 )
4280 .unwrap(),
4281 cx
4282 )
4283 .await
4284 .unwrap(),
4285 HashMap::from_iter([
4286 ("dir/one.ts".to_string(), vec![14..18]),
4287 ("dir/two.ts".to_string(), vec![14..18]),
4288 ]),
4289 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4290 );
4291}
4292
4293#[gpui::test]
4294async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4295 init_test(cx);
4296
4297 let fs = FakeFs::new(cx.executor());
4298 fs.insert_tree(
4299 "/worktree-a",
4300 json!({
4301 "haystack.rs": r#"// NEEDLE"#,
4302 "haystack.ts": r#"// NEEDLE"#,
4303 }),
4304 )
4305 .await;
4306 fs.insert_tree(
4307 "/worktree-b",
4308 json!({
4309 "haystack.rs": r#"// NEEDLE"#,
4310 "haystack.ts": r#"// NEEDLE"#,
4311 }),
4312 )
4313 .await;
4314
4315 let project = Project::test(
4316 fs.clone(),
4317 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4318 cx,
4319 )
4320 .await;
4321
4322 assert_eq!(
4323 search(
4324 &project,
4325 SearchQuery::text(
4326 "NEEDLE",
4327 false,
4328 true,
4329 false,
4330 vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
4331 Vec::new()
4332 )
4333 .unwrap(),
4334 cx
4335 )
4336 .await
4337 .unwrap(),
4338 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4339 "should only return results from included worktree"
4340 );
4341 assert_eq!(
4342 search(
4343 &project,
4344 SearchQuery::text(
4345 "NEEDLE",
4346 false,
4347 true,
4348 false,
4349 vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
4350 Vec::new()
4351 )
4352 .unwrap(),
4353 cx
4354 )
4355 .await
4356 .unwrap(),
4357 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4358 "should only return results from included worktree"
4359 );
4360
4361 assert_eq!(
4362 search(
4363 &project,
4364 SearchQuery::text(
4365 "NEEDLE",
4366 false,
4367 true,
4368 false,
4369 vec![PathMatcher::new("*.ts").unwrap()],
4370 Vec::new()
4371 )
4372 .unwrap(),
4373 cx
4374 )
4375 .await
4376 .unwrap(),
4377 HashMap::from_iter([
4378 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4379 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4380 ]),
4381 "should return results from both worktrees"
4382 );
4383}
4384
4385#[gpui::test]
4386async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4387 init_test(cx);
4388
4389 let fs = FakeFs::new(cx.background_executor.clone());
4390 fs.insert_tree(
4391 "/dir",
4392 json!({
4393 ".git": {},
4394 ".gitignore": "**/target\n/node_modules\n",
4395 "target": {
4396 "index.txt": "index_key:index_value"
4397 },
4398 "node_modules": {
4399 "eslint": {
4400 "index.ts": "const eslint_key = 'eslint value'",
4401 "package.json": r#"{ "some_key": "some value" }"#,
4402 },
4403 "prettier": {
4404 "index.ts": "const prettier_key = 'prettier value'",
4405 "package.json": r#"{ "other_key": "other value" }"#,
4406 },
4407 },
4408 "package.json": r#"{ "main_key": "main value" }"#,
4409 }),
4410 )
4411 .await;
4412 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4413
4414 let query = "key";
4415 assert_eq!(
4416 search(
4417 &project,
4418 SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
4419 cx
4420 )
4421 .await
4422 .unwrap(),
4423 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4424 "Only one non-ignored file should have the query"
4425 );
4426
4427 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4428 assert_eq!(
4429 search(
4430 &project,
4431 SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
4432 cx
4433 )
4434 .await
4435 .unwrap(),
4436 HashMap::from_iter([
4437 ("dir/package.json".to_string(), vec![8..11]),
4438 ("dir/target/index.txt".to_string(), vec![6..9]),
4439 (
4440 "dir/node_modules/prettier/package.json".to_string(),
4441 vec![9..12]
4442 ),
4443 (
4444 "dir/node_modules/prettier/index.ts".to_string(),
4445 vec![15..18]
4446 ),
4447 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4448 (
4449 "dir/node_modules/eslint/package.json".to_string(),
4450 vec![8..11]
4451 ),
4452 ]),
4453 "Unrestricted search with ignored directories should find every file with the query"
4454 );
4455
4456 let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
4457 let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
4458 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4459 assert_eq!(
4460 search(
4461 &project,
4462 SearchQuery::text(
4463 query,
4464 false,
4465 false,
4466 true,
4467 files_to_include,
4468 files_to_exclude,
4469 )
4470 .unwrap(),
4471 cx
4472 )
4473 .await
4474 .unwrap(),
4475 HashMap::from_iter([(
4476 "dir/node_modules/prettier/package.json".to_string(),
4477 vec![9..12]
4478 )]),
4479 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4480 );
4481}
4482
// The literal prefix of a glob is the leading path portion before any
// glob metacharacters; a glob with no metacharacters is its own prefix.
#[test]
fn test_glob_literal_prefix() {
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected) in cases {
        assert_eq!(glob_literal_prefix(glob), expected);
    }
}
4490
4491#[gpui::test]
4492async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4493 init_test(cx);
4494
4495 let fs = FakeFs::new(cx.executor().clone());
4496 fs.insert_tree(
4497 "/one/two",
4498 json!({
4499 "three": {
4500 "a.txt": "",
4501 "four": {}
4502 },
4503 "c.rs": ""
4504 }),
4505 )
4506 .await;
4507
4508 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4509 project
4510 .update(cx, |project, cx| {
4511 let id = project.worktrees().next().unwrap().read(cx).id();
4512 project.create_entry((id, "b.."), true, cx)
4513 })
4514 .unwrap()
4515 .await
4516 .to_included()
4517 .unwrap();
4518
4519 // Can't create paths outside the project
4520 let result = project
4521 .update(cx, |project, cx| {
4522 let id = project.worktrees().next().unwrap().read(cx).id();
4523 project.create_entry((id, "../../boop"), true, cx)
4524 })
4525 .await;
4526 assert!(result.is_err());
4527
4528 // Can't create paths with '..'
4529 let result = project
4530 .update(cx, |project, cx| {
4531 let id = project.worktrees().next().unwrap().read(cx).id();
4532 project.create_entry((id, "four/../beep"), true, cx)
4533 })
4534 .await;
4535 assert!(result.is_err());
4536
4537 assert_eq!(
4538 fs.paths(true),
4539 vec![
4540 PathBuf::from("/"),
4541 PathBuf::from("/one"),
4542 PathBuf::from("/one/two"),
4543 PathBuf::from("/one/two/c.rs"),
4544 PathBuf::from("/one/two/three"),
4545 PathBuf::from("/one/two/three/a.txt"),
4546 PathBuf::from("/one/two/three/b.."),
4547 PathBuf::from("/one/two/three/four"),
4548 ]
4549 );
4550
4551 // And we cannot open buffers with '..'
4552 let result = project
4553 .update(cx, |project, cx| {
4554 let id = project.worktrees().next().unwrap().read(cx).id();
4555 project.open_buffer((id, "../c.rs"), cx)
4556 })
4557 .await;
4558 assert!(result.is_err())
4559}
4560
// Hover requests should fan out to every language server registered for the
// buffer's language that advertises hover capabilities; responses are merged,
// servers answering `None` contribute nothing, and servers without the
// capability must never be queried.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: two that answer hovers, one
    // that answers with `None`, and one with no hover capability at all.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // NOTE(review): the boolean argument presumably marks this adapter as the
    // primary one for the language — confirm against
    // `register_specific_fake_lsp_adapter`'s signature.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This server advertises no hover support, so it must never receive a
    // hover request (enforced by the panicking handler installed below).
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer triggers startup of the registered language servers.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server, keyed by server name,
    // appropriate to the role that server plays in the test.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two reply with a real hover, labelled by server name
                // so the final assertion can tell the responses apart.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Capable of hovers but returns nothing; must not appear in
                // the merged result.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Handle is deliberately dropped: this handler panicking is
                // how the test detects an unwanted request.
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover first, then wait until every capable server has
    // actually received a request before inspecting the merged result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Sorted, because the fan-out imposes no ordering on server responses.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4714
4715#[gpui::test]
4716async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
4717 init_test(cx);
4718
4719 let fs = FakeFs::new(cx.executor());
4720 fs.insert_tree(
4721 "/dir",
4722 json!({
4723 "a.ts": "a",
4724 }),
4725 )
4726 .await;
4727
4728 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4729
4730 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4731 language_registry.add(typescript_lang());
4732 let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
4733 "TypeScript",
4734 FakeLspAdapter {
4735 capabilities: lsp::ServerCapabilities {
4736 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4737 ..lsp::ServerCapabilities::default()
4738 },
4739 ..FakeLspAdapter::default()
4740 },
4741 );
4742
4743 let buffer = project
4744 .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
4745 .await
4746 .unwrap();
4747 cx.executor().run_until_parked();
4748
4749 let fake_server = fake_language_servers
4750 .next()
4751 .await
4752 .expect("failed to get the language server");
4753
4754 let mut request_handled =
4755 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
4756 Ok(Some(lsp::Hover {
4757 contents: lsp::HoverContents::Array(vec![
4758 lsp::MarkedString::String("".to_string()),
4759 lsp::MarkedString::String(" ".to_string()),
4760 lsp::MarkedString::String("\n\n\n".to_string()),
4761 ]),
4762 range: None,
4763 }))
4764 });
4765
4766 let hover_task = project.update(cx, |project, cx| {
4767 project.hover(&buffer, Point::new(0, 0), cx)
4768 });
4769 let () = request_handled
4770 .next()
4771 .await
4772 .expect("All hover requests should have been triggered");
4773 assert_eq!(
4774 Vec::<String>::new(),
4775 hover_task
4776 .await
4777 .into_iter()
4778 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
4779 .sorted()
4780 .collect::<Vec<_>>(),
4781 "Empty hover parts should be ignored"
4782 );
4783}
4784
4785#[gpui::test]
4786async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
4787 init_test(cx);
4788
4789 let fs = FakeFs::new(cx.executor());
4790 fs.insert_tree(
4791 "/dir",
4792 json!({
4793 "a.tsx": "a",
4794 }),
4795 )
4796 .await;
4797
4798 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4799
4800 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4801 language_registry.add(tsx_lang());
4802 let language_server_names = [
4803 "TypeScriptServer",
4804 "TailwindServer",
4805 "ESLintServer",
4806 "NoActionsCapabilitiesServer",
4807 ];
4808 let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
4809 "tsx",
4810 true,
4811 FakeLspAdapter {
4812 name: &language_server_names[0],
4813 capabilities: lsp::ServerCapabilities {
4814 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4815 ..lsp::ServerCapabilities::default()
4816 },
4817 ..FakeLspAdapter::default()
4818 },
4819 );
4820 let _a = language_registry.register_specific_fake_lsp_adapter(
4821 "tsx",
4822 false,
4823 FakeLspAdapter {
4824 name: &language_server_names[1],
4825 capabilities: lsp::ServerCapabilities {
4826 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4827 ..lsp::ServerCapabilities::default()
4828 },
4829 ..FakeLspAdapter::default()
4830 },
4831 );
4832 let _b = language_registry.register_specific_fake_lsp_adapter(
4833 "tsx",
4834 false,
4835 FakeLspAdapter {
4836 name: &language_server_names[2],
4837 capabilities: lsp::ServerCapabilities {
4838 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
4839 ..lsp::ServerCapabilities::default()
4840 },
4841 ..FakeLspAdapter::default()
4842 },
4843 );
4844 let _c = language_registry.register_specific_fake_lsp_adapter(
4845 "tsx",
4846 false,
4847 FakeLspAdapter {
4848 name: &language_server_names[3],
4849 capabilities: lsp::ServerCapabilities {
4850 code_action_provider: None,
4851 ..lsp::ServerCapabilities::default()
4852 },
4853 ..FakeLspAdapter::default()
4854 },
4855 );
4856
4857 let buffer = project
4858 .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
4859 .await
4860 .unwrap();
4861 cx.executor().run_until_parked();
4862
4863 let mut servers_with_actions_requests = HashMap::default();
4864 for i in 0..language_server_names.len() {
4865 let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
4866 panic!(
4867 "Failed to get language server #{i} with name {}",
4868 &language_server_names[i]
4869 )
4870 });
4871 let new_server_name = new_server.server.name();
4872 assert!(
4873 !servers_with_actions_requests.contains_key(new_server_name),
4874 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
4875 );
4876 let new_server_name = new_server_name.to_string();
4877 match new_server_name.as_str() {
4878 "TailwindServer" | "TypeScriptServer" => {
4879 servers_with_actions_requests.insert(
4880 new_server_name.clone(),
4881 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4882 move |_, _| {
4883 let name = new_server_name.clone();
4884 async move {
4885 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
4886 lsp::CodeAction {
4887 title: format!("{name} code action"),
4888 ..lsp::CodeAction::default()
4889 },
4890 )]))
4891 }
4892 },
4893 ),
4894 );
4895 }
4896 "ESLintServer" => {
4897 servers_with_actions_requests.insert(
4898 new_server_name,
4899 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
4900 |_, _| async move { Ok(None) },
4901 ),
4902 );
4903 }
4904 "NoActionsCapabilitiesServer" => {
4905 let _never_handled = new_server
4906 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
4907 panic!(
4908 "Should not call for code actions server with no corresponding capabilities"
4909 )
4910 });
4911 }
4912 unexpected => panic!("Unexpected server name: {unexpected}"),
4913 }
4914 }
4915
4916 let code_actions_task = project.update(cx, |project, cx| {
4917 project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
4918 });
4919 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
4920 |mut code_actions_request| async move {
4921 code_actions_request
4922 .next()
4923 .await
4924 .expect("All code actions requests should have been triggered")
4925 },
4926 ))
4927 .await;
4928 assert_eq!(
4929 vec!["TailwindServer code action", "TypeScriptServer code action"],
4930 code_actions_task
4931 .await
4932 .into_iter()
4933 .map(|code_action| code_action.lsp_action.title)
4934 .sorted()
4935 .collect::<Vec<_>>(),
4936 "Should receive code actions responses from all related servers with hover capabilities"
4937 );
4938}
4939
4940#[gpui::test]
4941async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
4942 init_test(cx);
4943
4944 let fs = FakeFs::new(cx.executor());
4945 fs.insert_tree(
4946 "/dir",
4947 json!({
4948 "a.rs": "let a = 1;",
4949 "b.rs": "let b = 2;",
4950 "c.rs": "let c = 2;",
4951 }),
4952 )
4953 .await;
4954
4955 let project = Project::test(
4956 fs,
4957 [
4958 "/dir/a.rs".as_ref(),
4959 "/dir/b.rs".as_ref(),
4960 "/dir/c.rs".as_ref(),
4961 ],
4962 cx,
4963 )
4964 .await;
4965
4966 // check the initial state and get the worktrees
4967 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
4968 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
4969 assert_eq!(worktrees.len(), 3);
4970
4971 let worktree_a = worktrees[0].read(cx);
4972 let worktree_b = worktrees[1].read(cx);
4973 let worktree_c = worktrees[2].read(cx);
4974
4975 // check they start in the right order
4976 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
4977 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
4978 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
4979
4980 (
4981 worktrees[0].clone(),
4982 worktrees[1].clone(),
4983 worktrees[2].clone(),
4984 )
4985 });
4986
4987 // move first worktree to after the second
4988 // [a, b, c] -> [b, a, c]
4989 project
4990 .update(cx, |project, cx| {
4991 let first = worktree_a.read(cx);
4992 let second = worktree_b.read(cx);
4993 project.move_worktree(first.id(), second.id(), cx)
4994 })
4995 .expect("moving first after second");
4996
4997 // check the state after moving
4998 project.update(cx, |project, cx| {
4999 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5000 assert_eq!(worktrees.len(), 3);
5001
5002 let first = worktrees[0].read(cx);
5003 let second = worktrees[1].read(cx);
5004 let third = worktrees[2].read(cx);
5005
5006 // check they are now in the right order
5007 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5008 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5009 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5010 });
5011
5012 // move the second worktree to before the first
5013 // [b, a, c] -> [a, b, c]
5014 project
5015 .update(cx, |project, cx| {
5016 let second = worktree_a.read(cx);
5017 let first = worktree_b.read(cx);
5018 project.move_worktree(first.id(), second.id(), cx)
5019 })
5020 .expect("moving second before first");
5021
5022 // check the state after moving
5023 project.update(cx, |project, cx| {
5024 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5025 assert_eq!(worktrees.len(), 3);
5026
5027 let first = worktrees[0].read(cx);
5028 let second = worktrees[1].read(cx);
5029 let third = worktrees[2].read(cx);
5030
5031 // check they are now in the right order
5032 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5033 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5034 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5035 });
5036
5037 // move the second worktree to after the third
5038 // [a, b, c] -> [a, c, b]
5039 project
5040 .update(cx, |project, cx| {
5041 let second = worktree_b.read(cx);
5042 let third = worktree_c.read(cx);
5043 project.move_worktree(second.id(), third.id(), cx)
5044 })
5045 .expect("moving second after third");
5046
5047 // check the state after moving
5048 project.update(cx, |project, cx| {
5049 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5050 assert_eq!(worktrees.len(), 3);
5051
5052 let first = worktrees[0].read(cx);
5053 let second = worktrees[1].read(cx);
5054 let third = worktrees[2].read(cx);
5055
5056 // check they are now in the right order
5057 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5058 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5059 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5060 });
5061
5062 // move the third worktree to before the second
5063 // [a, c, b] -> [a, b, c]
5064 project
5065 .update(cx, |project, cx| {
5066 let third = worktree_c.read(cx);
5067 let second = worktree_b.read(cx);
5068 project.move_worktree(third.id(), second.id(), cx)
5069 })
5070 .expect("moving third before second");
5071
5072 // check the state after moving
5073 project.update(cx, |project, cx| {
5074 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5075 assert_eq!(worktrees.len(), 3);
5076
5077 let first = worktrees[0].read(cx);
5078 let second = worktrees[1].read(cx);
5079 let third = worktrees[2].read(cx);
5080
5081 // check they are now in the right order
5082 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5083 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5084 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5085 });
5086
5087 // move the first worktree to after the third
5088 // [a, b, c] -> [b, c, a]
5089 project
5090 .update(cx, |project, cx| {
5091 let first = worktree_a.read(cx);
5092 let third = worktree_c.read(cx);
5093 project.move_worktree(first.id(), third.id(), cx)
5094 })
5095 .expect("moving first after third");
5096
5097 // check the state after moving
5098 project.update(cx, |project, cx| {
5099 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5100 assert_eq!(worktrees.len(), 3);
5101
5102 let first = worktrees[0].read(cx);
5103 let second = worktrees[1].read(cx);
5104 let third = worktrees[2].read(cx);
5105
5106 // check they are now in the right order
5107 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5108 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5109 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5110 });
5111
5112 // move the third worktree to before the first
5113 // [b, c, a] -> [a, b, c]
5114 project
5115 .update(cx, |project, cx| {
5116 let third = worktree_a.read(cx);
5117 let first = worktree_b.read(cx);
5118 project.move_worktree(third.id(), first.id(), cx)
5119 })
5120 .expect("moving third before first");
5121
5122 // check the state after moving
5123 project.update(cx, |project, cx| {
5124 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5125 assert_eq!(worktrees.len(), 3);
5126
5127 let first = worktrees[0].read(cx);
5128 let second = worktrees[1].read(cx);
5129 let third = worktrees[2].read(cx);
5130
5131 // check they are now in the right order
5132 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5133 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5134 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5135 });
5136}
5137
5138async fn search(
5139 project: &Model<Project>,
5140 query: SearchQuery,
5141 cx: &mut gpui::TestAppContext,
5142) -> Result<HashMap<String, Vec<Range<usize>>>> {
5143 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5144 let mut results = HashMap::default();
5145 while let Some(search_result) = search_rx.next().await {
5146 match search_result {
5147 SearchResult::Buffer { buffer, ranges } => {
5148 results.entry(buffer).or_insert(ranges);
5149 }
5150 SearchResult::LimitReached => {}
5151 }
5152 }
5153 Ok(results
5154 .into_iter()
5155 .map(|(buffer, ranges)| {
5156 buffer.update(cx, |buffer, cx| {
5157 let path = buffer
5158 .file()
5159 .unwrap()
5160 .full_path(cx)
5161 .to_string_lossy()
5162 .to_string();
5163 let ranges = ranges
5164 .into_iter()
5165 .map(|range| range.to_offset(buffer))
5166 .collect::<Vec<_>>();
5167 (path, ranges)
5168 })
5169 })
5170 .collect())
5171}
5172
5173fn init_test(cx: &mut gpui::TestAppContext) {
5174 if std::env::var("RUST_LOG").is_ok() {
5175 env_logger::try_init().ok();
5176 }
5177
5178 cx.update(|cx| {
5179 let settings_store = SettingsStore::test(cx);
5180 cx.set_global(settings_store);
5181 release_channel::init(SemanticVersion::default(), cx);
5182 language::init(cx);
5183 Project::init_settings(cx);
5184 });
5185}
5186
5187fn json_lang() -> Arc<Language> {
5188 Arc::new(Language::new(
5189 LanguageConfig {
5190 name: "JSON".into(),
5191 matcher: LanguageMatcher {
5192 path_suffixes: vec!["json".to_string()],
5193 ..Default::default()
5194 },
5195 ..Default::default()
5196 },
5197 None,
5198 ))
5199}
5200
5201fn js_lang() -> Arc<Language> {
5202 Arc::new(Language::new(
5203 LanguageConfig {
5204 name: Arc::from("JavaScript"),
5205 matcher: LanguageMatcher {
5206 path_suffixes: vec!["js".to_string()],
5207 ..Default::default()
5208 },
5209 ..Default::default()
5210 },
5211 None,
5212 ))
5213}
5214
5215fn rust_lang() -> Arc<Language> {
5216 Arc::new(Language::new(
5217 LanguageConfig {
5218 name: "Rust".into(),
5219 matcher: LanguageMatcher {
5220 path_suffixes: vec!["rs".to_string()],
5221 ..Default::default()
5222 },
5223 ..Default::default()
5224 },
5225 Some(tree_sitter_rust::language()),
5226 ))
5227}
5228
5229fn typescript_lang() -> Arc<Language> {
5230 Arc::new(Language::new(
5231 LanguageConfig {
5232 name: "TypeScript".into(),
5233 matcher: LanguageMatcher {
5234 path_suffixes: vec!["ts".to_string()],
5235 ..Default::default()
5236 },
5237 ..Default::default()
5238 },
5239 Some(tree_sitter_typescript::language_typescript()),
5240 ))
5241}
5242
5243fn tsx_lang() -> Arc<Language> {
5244 Arc::new(Language::new(
5245 LanguageConfig {
5246 name: "tsx".into(),
5247 matcher: LanguageMatcher {
5248 path_suffixes: vec!["tsx".to_string()],
5249 ..Default::default()
5250 },
5251 ..Default::default()
5252 },
5253 Some(tree_sitter_typescript::language_tsx()),
5254 ))
5255}
5256
5257fn get_all_tasks(
5258 project: &Model<Project>,
5259 worktree_id: Option<WorktreeId>,
5260 task_context: &TaskContext,
5261 cx: &mut AppContext,
5262) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5263 let resolved_tasks = project.update(cx, |project, cx| {
5264 project
5265 .task_inventory()
5266 .read(cx)
5267 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5268 });
5269
5270 cx.spawn(|_| async move {
5271 let (mut old, new) = resolved_tasks.await;
5272 old.extend(new);
5273 old
5274 })
5275}