1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use gpui::{AppContext, SemanticVersion, UpdateGlobal};
5use language::{
6 language_settings::{AllLanguageSettings, LanguageSettingsContent},
7 tree_sitter_rust, tree_sitter_typescript, Diagnostic, FakeLspAdapter, LanguageConfig,
8 LanguageMatcher, LineEnding, OffsetRangeExt, Point, ToPoint,
9};
10use lsp::NumberOrString;
11use parking_lot::Mutex;
12use pretty_assertions::assert_eq;
13use serde_json::json;
14#[cfg(not(windows))]
15use std::os;
16use std::task::Poll;
17use task::{ResolvedTask, TaskContext, TaskTemplate, TaskTemplates};
18use unindent::Unindent as _;
19use util::{assert_set_eq, paths::PathMatcher, test::temp_tree};
20
21#[gpui::test]
22async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
23 cx.executor().allow_parking();
24
25 let (tx, mut rx) = futures::channel::mpsc::unbounded();
26 let _thread = std::thread::spawn(move || {
27 std::fs::metadata("/tmp").unwrap();
28 std::thread::sleep(Duration::from_millis(1000));
29 tx.unbounded_send(1).unwrap();
30 });
31 rx.next().await.unwrap();
32}
33
34#[gpui::test]
35async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
36 cx.executor().allow_parking();
37
38 let io_task = smol::unblock(move || {
39 println!("sleeping on thread {:?}", std::thread::current().id());
40 std::thread::sleep(Duration::from_millis(10));
41 1
42 });
43
44 let task = cx.foreground_executor().spawn(async move {
45 io_task.await;
46 });
47
48 task.await;
49}
50
51#[cfg(not(windows))]
52#[gpui::test]
53async fn test_symlinks(cx: &mut gpui::TestAppContext) {
54 init_test(cx);
55 cx.executor().allow_parking();
56
57 let dir = temp_tree(json!({
58 "root": {
59 "apple": "",
60 "banana": {
61 "carrot": {
62 "date": "",
63 "endive": "",
64 }
65 },
66 "fennel": {
67 "grape": "",
68 }
69 }
70 }));
71
72 let root_link_path = dir.path().join("root_link");
73 os::unix::fs::symlink(&dir.path().join("root"), &root_link_path).unwrap();
74 os::unix::fs::symlink(
75 &dir.path().join("root/fennel"),
76 &dir.path().join("root/finnochio"),
77 )
78 .unwrap();
79
80 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
81
82 project.update(cx, |project, cx| {
83 let tree = project.worktrees().next().unwrap().read(cx);
84 assert_eq!(tree.file_count(), 5);
85 assert_eq!(
86 tree.inode_for_path("fennel/grape"),
87 tree.inode_for_path("finnochio/grape")
88 );
89 });
90}
91
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory `.zed/` configuration: the worktree root and the
    // nested `b/` directory each contribute their own `settings.json`
    // (tab_size) and `tasks.json` (task list), and a worktree task source can
    // later be replaced at runtime with a channel-backed static source.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let task_context = TaskContext::default();

    // Let the worktree finish scanning and the settings files get loaded.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees().next().unwrap().read(cx).id()
        })
    });
    // Identity of the root-level `.zed/tasks.json` task source.
    let global_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
        id_base: "local_tasks_for_worktree".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: a file under `a/` inherits the root
            // settings, while a file under `b/` sees the nested override.
            let settings_a = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("a/a.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );
            let settings_b = language_settings(
                None,
                Some(
                    &(File::for_entry(
                        tree.entry_for_path("b/b.rs").unwrap().clone(),
                        worktree.clone(),
                    ) as _),
                ),
                cx,
            );

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks are labelled "cargo check" but carry different args,
    // proving each directory's tasks.json was parsed independently.
    assert_eq!(
        all_tasks,
        vec![
            (
                global_task_source_kind.clone(),
                "cargo check".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as recently scheduled so the inventory
    // tracks it before its source is swapped out below.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &global_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        project.task_inventory().update(cx, |inventory, _| {
            inventory.task_scheduled(global_task_source_kind.clone(), resolved_task);
        });
    });

    // Replace the file-backed root source with a channel-backed static
    // source carrying an updated template (extra arg + env var).
    let tasks = serde_json::to_string(&TaskTemplates(vec![TaskTemplate {
        label: "cargo check".to_string(),
        command: "cargo".to_string(),
        args: vec![
            "check".to_string(),
            "--all".to_string(),
            "--all-targets".to_string(),
        ],
        env: HashMap::from_iter(Some((
            "RUSTFLAGS".to_string(),
            "-Zunstable-options".to_string(),
        ))),
        ..TaskTemplate::default()
    }]))
    .unwrap();
    let (tx, rx) = futures::channel::mpsc::unbounded();
    cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.task_inventory().update(cx, |inventory, cx| {
                inventory.remove_local_static_source(Path::new("/the-root/.zed/tasks.json"));
                inventory.add_source(
                    global_task_source_kind.clone(),
                    |tx, cx| StaticSource::new(TrackedFile::new(rx, tx, cx)),
                    cx,
                );
            });
        })
    });
    // Deliver the new task definitions through the channel.
    tx.unbounded_send(tasks).unwrap();

    cx.run_until_parked();
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The root task now reflects the replacement source; the nested `b/`
    // task is untouched.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string()
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    abs_path: PathBuf::from("/the-root/b/.zed/tasks.json"),
                    id_base: "local_tasks_for_worktree".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
        ]
    );
}
296
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end lifecycle test for language-server management: servers start
    // lazily when a matching buffer opens, buffers are routed to the server
    // for their language (open/change/save/close notifications), renames can
    // move a buffer between servers, and restarting servers reopens the
    // relevant documents. Ordering of awaited notifications matters
    // throughout.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Each fake adapter yields a stream of fake servers (one per startup),
    // with distinct completion trigger characters so we can tell which
    // server configured a given buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp_adapter(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let toml_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let rust_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/test.rs")
                .unwrap()
                .into(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test.rs")
                .unwrap()
                .into(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let json_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/package.json")
                .unwrap()
                .into(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.completion_triggers(), &[":".to_string()]);
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let rust_buffer2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer.completion_triggers(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    // The TOML edit below must produce no notification; only the Rust edit
    // is expected to arrive at the Rust server.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test2.rs")
                .unwrap()
                .into(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/Cargo.toml")
                .unwrap()
                .into()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/Cargo.toml")
                .unwrap()
                .into()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test2.rs")
                .unwrap()
                .into()
        ),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/test3.rs")
                .unwrap()
                .into(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the buffer so we can verify below that a
    // language change clears it.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test3.rs")
                .unwrap()
                .into(),
        ),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/test3.json")
                .unwrap()
                .into(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/test3.json")
                .unwrap()
                .into(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers must receive a shutdown request before the
    // replacements come up.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path("/the-root/test.rs")
                .unwrap()
                .into(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    // (order of the two DidOpen notifications is unspecified, hence the
    // set comparison).
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path("/the-root/package.json")
                    .unwrap()
                    .into(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Uri::from_file_path("/the-root/test3.json")
                    .unwrap()
                    .into(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(json_buffer));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Uri::from_file_path("/the-root/package.json")
                .unwrap()
                .into(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
705
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies `workspace/didChangeWatchedFiles` support: registering file
    // watchers causes gitignored directories matching the globs to be loaded,
    // and subsequent FS mutations are forwarded to the server only when they
    // match a registered watcher pattern.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for asserting how many extra directory scans the watcher
    // registration triggers below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events, but scanning the newly
    // watched ignored directory costs a bounded number of read_dir calls.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees().next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    // (d.txt and target/x/... fall outside every registered glob.)
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path("/the-root/src/b.rs")
                    .unwrap()
                    .into(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path("/the-root/src/c.rs")
                    .unwrap()
                    .into(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path("/the-root/target/y/out/y2.rs")
                    .unwrap()
                    .into(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
905
906#[gpui::test]
907async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
908 init_test(cx);
909
910 let fs = FakeFs::new(cx.executor());
911 fs.insert_tree(
912 "/dir",
913 json!({
914 "a.rs": "let a = 1;",
915 "b.rs": "let b = 2;"
916 }),
917 )
918 .await;
919
920 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
921
922 let buffer_a = project
923 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
924 .await
925 .unwrap();
926 let buffer_b = project
927 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
928 .await
929 .unwrap();
930
931 project.update(cx, |project, cx| {
932 project
933 .update_diagnostics(
934 LanguageServerId(0),
935 lsp::PublishDiagnosticsParams {
936 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
937 version: None,
938 diagnostics: vec![lsp::Diagnostic {
939 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
940 severity: Some(lsp::DiagnosticSeverity::ERROR),
941 message: "error 1".to_string(),
942 ..Default::default()
943 }],
944 },
945 &[],
946 cx,
947 )
948 .unwrap();
949 project
950 .update_diagnostics(
951 LanguageServerId(0),
952 lsp::PublishDiagnosticsParams {
953 uri: Uri::from_file_path("/dir/b.rs").unwrap().into(),
954 version: None,
955 diagnostics: vec![lsp::Diagnostic {
956 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
957 severity: Some(lsp::DiagnosticSeverity::WARNING),
958 message: "error 2".to_string(),
959 ..Default::default()
960 }],
961 },
962 &[],
963 cx,
964 )
965 .unwrap();
966 });
967
968 buffer_a.update(cx, |buffer, _| {
969 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
970 assert_eq!(
971 chunks
972 .iter()
973 .map(|(s, d)| (s.as_str(), *d))
974 .collect::<Vec<_>>(),
975 &[
976 ("let ", None),
977 ("a", Some(DiagnosticSeverity::ERROR)),
978 (" = 1;", None),
979 ]
980 );
981 });
982 buffer_b.update(cx, |buffer, _| {
983 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
984 assert_eq!(
985 chunks
986 .iter()
987 .map(|(s, d)| (s.as_str(), *d))
988 .collect::<Vec<_>>(),
989 &[
990 ("let ", None),
991 ("b", Some(DiagnosticSeverity::WARNING)),
992 (" = 2;", None),
993 ]
994 );
995 });
996}
997
998#[gpui::test]
999async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1000 init_test(cx);
1001
1002 let fs = FakeFs::new(cx.executor());
1003 fs.insert_tree(
1004 "/root",
1005 json!({
1006 "dir": {
1007 ".git": {
1008 "HEAD": "ref: refs/heads/main",
1009 },
1010 ".gitignore": "b.rs",
1011 "a.rs": "let a = 1;",
1012 "b.rs": "let b = 2;",
1013 },
1014 "other.rs": "let b = c;"
1015 }),
1016 )
1017 .await;
1018
1019 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1020 let (worktree, _) = project
1021 .update(cx, |project, cx| {
1022 project.find_or_create_local_worktree("/root/dir", true, cx)
1023 })
1024 .await
1025 .unwrap();
1026 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1027
1028 let (worktree, _) = project
1029 .update(cx, |project, cx| {
1030 project.find_or_create_local_worktree("/root/other.rs", false, cx)
1031 })
1032 .await
1033 .unwrap();
1034 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1035
1036 let server_id = LanguageServerId(0);
1037 project.update(cx, |project, cx| {
1038 project
1039 .update_diagnostics(
1040 server_id,
1041 lsp::PublishDiagnosticsParams {
1042 uri: Uri::from_file_path("/root/dir/b.rs").unwrap().into(),
1043 version: None,
1044 diagnostics: vec![lsp::Diagnostic {
1045 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1046 severity: Some(lsp::DiagnosticSeverity::ERROR),
1047 message: "unused variable 'b'".to_string(),
1048 ..Default::default()
1049 }],
1050 },
1051 &[],
1052 cx,
1053 )
1054 .unwrap();
1055 project
1056 .update_diagnostics(
1057 server_id,
1058 lsp::PublishDiagnosticsParams {
1059 uri: Uri::from_file_path("/root/other.rs").unwrap().into(),
1060 version: None,
1061 diagnostics: vec![lsp::Diagnostic {
1062 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1063 severity: Some(lsp::DiagnosticSeverity::ERROR),
1064 message: "unknown variable 'c'".to_string(),
1065 ..Default::default()
1066 }],
1067 },
1068 &[],
1069 cx,
1070 )
1071 .unwrap();
1072 });
1073
1074 let main_ignored_buffer = project
1075 .update(cx, |project, cx| {
1076 project.open_buffer((main_worktree_id, "b.rs"), cx)
1077 })
1078 .await
1079 .unwrap();
1080 main_ignored_buffer.update(cx, |buffer, _| {
1081 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1082 assert_eq!(
1083 chunks
1084 .iter()
1085 .map(|(s, d)| (s.as_str(), *d))
1086 .collect::<Vec<_>>(),
1087 &[
1088 ("let ", None),
1089 ("b", Some(DiagnosticSeverity::ERROR)),
1090 (" = 2;", None),
1091 ],
1092 "Gigitnored buffers should still get in-buffer diagnostics",
1093 );
1094 });
1095 let other_buffer = project
1096 .update(cx, |project, cx| {
1097 project.open_buffer((other_worktree_id, ""), cx)
1098 })
1099 .await
1100 .unwrap();
1101 other_buffer.update(cx, |buffer, _| {
1102 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1103 assert_eq!(
1104 chunks
1105 .iter()
1106 .map(|(s, d)| (s.as_str(), *d))
1107 .collect::<Vec<_>>(),
1108 &[
1109 ("let b = ", None),
1110 ("c", Some(DiagnosticSeverity::ERROR)),
1111 (";", None),
1112 ],
1113 "Buffers from hidden projects should still get in-buffer diagnostics"
1114 );
1115 });
1116
1117 project.update(cx, |project, cx| {
1118 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1119 assert_eq!(
1120 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1121 vec![(
1122 ProjectPath {
1123 worktree_id: main_worktree_id,
1124 path: Arc::from(Path::new("b.rs")),
1125 },
1126 server_id,
1127 DiagnosticSummary {
1128 error_count: 1,
1129 warning_count: 0,
1130 }
1131 )]
1132 );
1133 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1134 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1135 });
1136}
1137
1138#[gpui::test]
1139async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
1140 init_test(cx);
1141
1142 let progress_token = "the-progress-token";
1143
1144 let fs = FakeFs::new(cx.executor());
1145 fs.insert_tree(
1146 "/dir",
1147 json!({
1148 "a.rs": "fn a() { A }",
1149 "b.rs": "const y: i32 = 1",
1150 }),
1151 )
1152 .await;
1153
1154 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1155 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1156
1157 language_registry.add(rust_lang());
1158 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1159 "Rust",
1160 FakeLspAdapter {
1161 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1162 disk_based_diagnostics_sources: vec!["disk".into()],
1163 ..Default::default()
1164 },
1165 );
1166
1167 let worktree_id = project.update(cx, |p, cx| p.worktrees().next().unwrap().read(cx).id());
1168
1169 // Cause worktree to start the fake language server
1170 let _buffer = project
1171 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
1172 .await
1173 .unwrap();
1174
1175 let mut events = cx.events(&project);
1176
1177 let fake_server = fake_servers.next().await.unwrap();
1178 assert_eq!(
1179 events.next().await.unwrap(),
1180 Event::LanguageServerAdded(LanguageServerId(0)),
1181 );
1182
1183 fake_server
1184 .start_progress(format!("{}/0", progress_token))
1185 .await;
1186 assert_eq!(
1187 events.next().await.unwrap(),
1188 Event::DiskBasedDiagnosticsStarted {
1189 language_server_id: LanguageServerId(0),
1190 }
1191 );
1192
1193 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1194 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
1195 version: None,
1196 diagnostics: vec![lsp::Diagnostic {
1197 range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
1198 severity: Some(lsp::DiagnosticSeverity::ERROR),
1199 message: "undefined variable 'A'".to_string(),
1200 ..Default::default()
1201 }],
1202 });
1203 assert_eq!(
1204 events.next().await.unwrap(),
1205 Event::DiagnosticsUpdated {
1206 language_server_id: LanguageServerId(0),
1207 path: (worktree_id, Path::new("a.rs")).into()
1208 }
1209 );
1210
1211 fake_server.end_progress(format!("{}/0", progress_token));
1212 assert_eq!(
1213 events.next().await.unwrap(),
1214 Event::DiskBasedDiagnosticsFinished {
1215 language_server_id: LanguageServerId(0)
1216 }
1217 );
1218
1219 let buffer = project
1220 .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
1221 .await
1222 .unwrap();
1223
1224 buffer.update(cx, |buffer, _| {
1225 let snapshot = buffer.snapshot();
1226 let diagnostics = snapshot
1227 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
1228 .collect::<Vec<_>>();
1229 assert_eq!(
1230 diagnostics,
1231 &[DiagnosticEntry {
1232 range: Point::new(0, 9)..Point::new(0, 10),
1233 diagnostic: Diagnostic {
1234 severity: lsp::DiagnosticSeverity::ERROR,
1235 message: "undefined variable 'A'".to_string(),
1236 group_id: 0,
1237 is_primary: true,
1238 ..Default::default()
1239 }
1240 }]
1241 )
1242 });
1243
1244 // Ensure publishing empty diagnostics twice only results in one update event.
1245 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1246 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
1247 version: None,
1248 diagnostics: Default::default(),
1249 });
1250 assert_eq!(
1251 events.next().await.unwrap(),
1252 Event::DiagnosticsUpdated {
1253 language_server_id: LanguageServerId(0),
1254 path: (worktree_id, Path::new("a.rs")).into()
1255 }
1256 );
1257
1258 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1259 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
1260 version: None,
1261 diagnostics: Default::default(),
1262 });
1263 cx.executor().run_until_parked();
1264 assert_eq!(futures::poll!(events.next()), Poll::Pending);
1265}
1266
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Restarting a language server while its disk-based diagnostics pass is
    // still in flight must not leave the project stuck in a "diagnostics
    // running" state: only the replacement server instance is tracked, and
    // its progress completion clears the running set even though the old
    // server never ended its progress.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    // Opening a buffer of the registered language starts the server.
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The replacement server is assigned the next server id (1).
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(LanguageServerId(1))
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running diagnostics; the old
    // server's unfinished progress is discarded along with it.
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, _| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics()
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1345
1346#[gpui::test]
1347async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1348 init_test(cx);
1349
1350 let fs = FakeFs::new(cx.executor());
1351 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1352
1353 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1354
1355 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1356 language_registry.add(rust_lang());
1357 let mut fake_servers =
1358 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1359
1360 let buffer = project
1361 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1362 .await
1363 .unwrap();
1364
1365 // Publish diagnostics
1366 let fake_server = fake_servers.next().await.unwrap();
1367 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1368 uri: Uri::from_file_path("/dir/a.rs").unwrap().into(),
1369 version: None,
1370 diagnostics: vec![lsp::Diagnostic {
1371 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1372 severity: Some(lsp::DiagnosticSeverity::ERROR),
1373 message: "the message".to_string(),
1374 ..Default::default()
1375 }],
1376 });
1377
1378 cx.executor().run_until_parked();
1379 buffer.update(cx, |buffer, _| {
1380 assert_eq!(
1381 buffer
1382 .snapshot()
1383 .diagnostics_in_range::<_, usize>(0..1, false)
1384 .map(|entry| entry.diagnostic.message.clone())
1385 .collect::<Vec<_>>(),
1386 ["the message".to_string()]
1387 );
1388 });
1389 project.update(cx, |project, cx| {
1390 assert_eq!(
1391 project.diagnostic_summary(false, cx),
1392 DiagnosticSummary {
1393 error_count: 1,
1394 warning_count: 0,
1395 }
1396 );
1397 });
1398
1399 project.update(cx, |project, cx| {
1400 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1401 });
1402
1403 // The diagnostics are cleared.
1404 cx.executor().run_until_parked();
1405 buffer.update(cx, |buffer, _| {
1406 assert_eq!(
1407 buffer
1408 .snapshot()
1409 .diagnostics_in_range::<_, usize>(0..1, false)
1410 .map(|entry| entry.diagnostic.message.clone())
1411 .collect::<Vec<_>>(),
1412 Vec::<String>::new(),
1413 );
1414 });
1415 project.update(cx, |project, cx| {
1416 assert_eq!(
1417 project.diagnostic_summary(false, cx),
1418 DiagnosticSummary {
1419 error_count: 0,
1420 warning_count: 0,
1421 }
1422 );
1423 });
1424}
1425
1426#[gpui::test]
1427async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1428 init_test(cx);
1429
1430 let fs = FakeFs::new(cx.executor());
1431 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1432
1433 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1434 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1435
1436 language_registry.add(rust_lang());
1437 let mut fake_servers =
1438 language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());
1439
1440 let buffer = project
1441 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1442 .await
1443 .unwrap();
1444
1445 // Before restarting the server, report diagnostics with an unknown buffer version.
1446 let fake_server = fake_servers.next().await.unwrap();
1447 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1448 uri: lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
1449 version: Some(10000),
1450 diagnostics: Vec::new(),
1451 });
1452 cx.executor().run_until_parked();
1453
1454 project.update(cx, |project, cx| {
1455 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1456 });
1457 let mut fake_server = fake_servers.next().await.unwrap();
1458 let notification = fake_server
1459 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1460 .await
1461 .text_document;
1462 assert_eq!(notification.version, 0);
1463}
1464
1465#[gpui::test]
1466async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1467 init_test(cx);
1468
1469 let progress_token = "the-progress-token";
1470
1471 let fs = FakeFs::new(cx.executor());
1472 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1473
1474 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1475
1476 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1477 language_registry.add(rust_lang());
1478 let mut fake_servers = language_registry.register_fake_lsp_adapter(
1479 "Rust",
1480 FakeLspAdapter {
1481 name: "the-language-server",
1482 disk_based_diagnostics_sources: vec!["disk".into()],
1483 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1484 ..Default::default()
1485 },
1486 );
1487
1488 let buffer = project
1489 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1490 .await
1491 .unwrap();
1492
1493 // Simulate diagnostics starting to update.
1494 let mut fake_server = fake_servers.next().await.unwrap();
1495 fake_server
1496 .start_progress_with(
1497 "another-token",
1498 lsp::WorkDoneProgressBegin {
1499 cancellable: Some(false),
1500 ..Default::default()
1501 },
1502 )
1503 .await;
1504 fake_server
1505 .start_progress_with(
1506 progress_token,
1507 lsp::WorkDoneProgressBegin {
1508 cancellable: Some(true),
1509 ..Default::default()
1510 },
1511 )
1512 .await;
1513 cx.executor().run_until_parked();
1514
1515 project.update(cx, |project, cx| {
1516 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1517 });
1518
1519 let cancel_notification = fake_server
1520 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1521 .await;
1522 assert_eq!(
1523 cancel_notification.token,
1524 NumberOrString::String(progress_token.into())
1525 );
1526}
1527
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Flipping `enable_language_server` in per-language settings must stop
    // exactly the affected server (observed as an LSP Exit notification) and
    // start a fresh instance when re-enabled, without disturbing servers
    // registered for other languages.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp_adapter(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.js", cx))
        .await
        .unwrap();

    // Each server receives a DidOpen for its language's buffer.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/b.js"
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    Arc::from("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    Arc::from("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance starts and re-opens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        "file:///dir/a.rs"
    );
    // The JavaScript server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1641
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics published against an older document version must be
    // translated through the buffer edits made since that version, so they
    // land on the content they were originally computed for. Also covers
    // overlapping diagnostics and out-of-(row-)order publishes.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    // Each change notification carries a strictly increasing document version.
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every row by 2; group ids are
    // assigned sequentially per diagnostic.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics sort wider-range (warning) before narrower (error)
        // at the same start position.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Within the overlap, the more severe (ERROR) style wins; the
        // warning-only tail keeps the warning style.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Resulting entries are ordered by buffer position regardless of the
        // order they were published in, with ranges adjusted for the edits
        // made since `change_notification_2`'s version.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
1921
1922#[gpui::test]
1923async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
1924 init_test(cx);
1925
1926 let text = concat!(
1927 "let one = ;\n", //
1928 "let two = \n",
1929 "let three = 3;\n",
1930 );
1931
1932 let fs = FakeFs::new(cx.executor());
1933 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
1934
1935 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1936 let buffer = project
1937 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
1938 .await
1939 .unwrap();
1940
1941 project.update(cx, |project, cx| {
1942 project
1943 .update_buffer_diagnostics(
1944 &buffer,
1945 LanguageServerId(0),
1946 None,
1947 vec![
1948 DiagnosticEntry {
1949 range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 10)),
1950 diagnostic: Diagnostic {
1951 severity: DiagnosticSeverity::ERROR,
1952 message: "syntax error 1".to_string(),
1953 ..Default::default()
1954 },
1955 },
1956 DiagnosticEntry {
1957 range: Unclipped(PointUtf16::new(1, 10))..Unclipped(PointUtf16::new(1, 10)),
1958 diagnostic: Diagnostic {
1959 severity: DiagnosticSeverity::ERROR,
1960 message: "syntax error 2".to_string(),
1961 ..Default::default()
1962 },
1963 },
1964 ],
1965 cx,
1966 )
1967 .unwrap();
1968 });
1969
1970 // An empty range is extended forward to include the following character.
1971 // At the end of a line, an empty range is extended backward to include
1972 // the preceding character.
1973 buffer.update(cx, |buffer, _| {
1974 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1975 assert_eq!(
1976 chunks
1977 .iter()
1978 .map(|(s, d)| (s.as_str(), *d))
1979 .collect::<Vec<_>>(),
1980 &[
1981 ("let one = ", None),
1982 (";", Some(DiagnosticSeverity::ERROR)),
1983 ("\nlet two =", None),
1984 (" ", Some(DiagnosticSeverity::ERROR)),
1985 ("\nlet three = 3;\n", None)
1986 ]
1987 );
1988 });
1989}
1990
1991#[gpui::test]
1992async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
1993 init_test(cx);
1994
1995 let fs = FakeFs::new(cx.executor());
1996 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
1997 .await;
1998
1999 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2000
2001 project.update(cx, |project, cx| {
2002 project
2003 .update_diagnostic_entries(
2004 LanguageServerId(0),
2005 Path::new("/dir/a.rs").to_owned(),
2006 None,
2007 vec![DiagnosticEntry {
2008 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2009 diagnostic: Diagnostic {
2010 severity: DiagnosticSeverity::ERROR,
2011 is_primary: true,
2012 message: "syntax error a1".to_string(),
2013 ..Default::default()
2014 },
2015 }],
2016 cx,
2017 )
2018 .unwrap();
2019 project
2020 .update_diagnostic_entries(
2021 LanguageServerId(1),
2022 Path::new("/dir/a.rs").to_owned(),
2023 None,
2024 vec![DiagnosticEntry {
2025 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2026 diagnostic: Diagnostic {
2027 severity: DiagnosticSeverity::ERROR,
2028 is_primary: true,
2029 message: "syntax error b1".to_string(),
2030 ..Default::default()
2031 },
2032 }],
2033 cx,
2034 )
2035 .unwrap();
2036
2037 assert_eq!(
2038 project.diagnostic_summary(false, cx),
2039 DiagnosticSummary {
2040 error_count: 2,
2041 warning_count: 0,
2042 }
2043 );
2044 });
2045}
2046
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // LSP edits computed against an older document version must be
    // transformed through the buffer edits made since that version, so they
    // apply cleanly to the current content.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Capture the document version the server "saw" when the buffer opened;
    // the LSP edits below will be tagged with this stale version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP edits use coordinates from `lsp_document_version`, i.e. from
    // before the three buffer edits above.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits yields a result that honors both the
    // server's intent and the interleaved manual edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2199
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // When a server expresses a small change as a huge re-insert-and-delete
    // diff (as rust-analyzer does for merge-imports), `edits_from_lsp` should
    // minimize it down to the actual changed spans.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The re-insert/delete pair collapses into just two minimal edits:
        // the import rewrite and the removal of the now-duplicated line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2307
// Feeds `Project::edits_from_lsp` a batch of LSP edits that are unsorted, have
// an inverted range (start after end), and reference a line (99) far beyond the
// end of the file. Verifies the edits are normalized — reordered, range
// repaired, and the out-of-bounds position clamped — into the same minimal
// edit set as the well-formed case.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = project
        .update(cx, |project, cx| {
            project.edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position is far past the end of the 8-line file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        // Resolve anchors to points for comparison against concrete coordinates.
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same two minimal,
        // properly-ordered edits as in the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        // Applying them produces the intended merged-imports text.
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
2411
2412fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2413 buffer: &Buffer,
2414 range: Range<T>,
2415) -> Vec<(String, Option<DiagnosticSeverity>)> {
2416 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2417 for chunk in buffer.snapshot().chunks(range, true) {
2418 if chunks.last().map_or(false, |prev_chunk| {
2419 prev_chunk.1 == chunk.diagnostic_severity
2420 }) {
2421 chunks.last_mut().unwrap().0.push_str(chunk.text);
2422 } else {
2423 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2424 }
2425 }
2426 chunks
2427}
2428
// Go-to-definition into a file outside the visible worktree: the project must
// answer with a location in "/dir/a.rs" (served via an invisible worktree that
// is dropped when the last reference to the definition goes away), without
// starting a second language server for the target file.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project's (visible) worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers =
        language_registry.register_fake_lsp_adapter("Rust", FakeLspAdapter::default());

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Respond to the definition request with a location in a.rs, a file that
    // is not in the visible worktree.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            Uri::from(params.text_document.uri).to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path("/dir/a.rs").unwrap().into(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        // The target buffer points at a.rs with the range the server returned.
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an *invisible* worktree to serve the definition.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: (absolute path, is_visible) for each of the project's worktrees.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees()
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2523
// Completions whose items carry no explicit text edit range: the project must
// infer the range to replace from the text around the cursor — the word prefix
// ("fqn") in the first case, and the partial path inside a string literal
// ("cmp") in the second.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing at the end of the identifier prefix "fqn".
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server item has an `insert_text` but no text edit range.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the 3-character word prefix "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal, just before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // No `insert_text` either — the label itself is the replacement text.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" (3 chars) plus nothing past the cursor,
    // ending one character before the closing quote.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2615
// A completion whose `insert_text` contains carriage returns ("\r" and "\r\n")
// must have its line endings normalized to "\n" before being applied, so the
// buffer never receives CR characters.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes bare "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both CR forms were normalized to plain "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2676
// End-to-end flow for a code action that carries a *command* rather than edits:
// resolve the action (which yields a command, not edits), execute the command,
// let the server push a `workspace/applyEdit` back to the editor, and verify
// the resulting project transaction contains (and can undo) that edit.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // The server advertises lazy resolution of code actions.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| project.code_actions(&buffer, 0..0, cx));
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the top of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path("/dir/a.ts").unwrap().into(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2809
2810#[gpui::test(iterations = 10)]
2811async fn test_save_file(cx: &mut gpui::TestAppContext) {
2812 init_test(cx);
2813
2814 let fs = FakeFs::new(cx.executor());
2815 fs.insert_tree(
2816 "/dir",
2817 json!({
2818 "file1": "the old contents",
2819 }),
2820 )
2821 .await;
2822
2823 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2824 let buffer = project
2825 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2826 .await
2827 .unwrap();
2828 buffer.update(cx, |buffer, cx| {
2829 assert_eq!(buffer.text(), "the old contents");
2830 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2831 });
2832
2833 project
2834 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2835 .await
2836 .unwrap();
2837
2838 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2839 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2840}
2841
2842#[gpui::test(iterations = 30)]
2843async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2844 init_test(cx);
2845
2846 let fs = FakeFs::new(cx.executor().clone());
2847 fs.insert_tree(
2848 "/dir",
2849 json!({
2850 "file1": "the original contents",
2851 }),
2852 )
2853 .await;
2854
2855 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2856 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2857 let buffer = project
2858 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2859 .await
2860 .unwrap();
2861
2862 // Simulate buffer diffs being slow, so that they don't complete before
2863 // the next file change occurs.
2864 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2865
2866 // Change the buffer's file on disk, and then wait for the file change
2867 // to be detected by the worktree, so that the buffer starts reloading.
2868 fs.save(
2869 "/dir/file1".as_ref(),
2870 &"the first contents".into(),
2871 Default::default(),
2872 )
2873 .await
2874 .unwrap();
2875 worktree.next_event(cx).await;
2876
2877 // Change the buffer's file again. Depending on the random seed, the
2878 // previous file change may still be in progress.
2879 fs.save(
2880 "/dir/file1".as_ref(),
2881 &"the second contents".into(),
2882 Default::default(),
2883 )
2884 .await
2885 .unwrap();
2886 worktree.next_event(cx).await;
2887
2888 cx.executor().run_until_parked();
2889 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2890 buffer.read_with(cx, |buffer, _| {
2891 assert_eq!(buffer.text(), on_disk_text);
2892 assert!(!buffer.is_dirty(), "buffer should not be dirty");
2893 assert!(!buffer.has_conflict(), "buffer should not be dirty");
2894 });
2895}
2896
2897#[gpui::test(iterations = 30)]
2898async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
2899 init_test(cx);
2900
2901 let fs = FakeFs::new(cx.executor().clone());
2902 fs.insert_tree(
2903 "/dir",
2904 json!({
2905 "file1": "the original contents",
2906 }),
2907 )
2908 .await;
2909
2910 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2911 let worktree = project.read_with(cx, |project, _| project.worktrees().next().unwrap());
2912 let buffer = project
2913 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2914 .await
2915 .unwrap();
2916
2917 // Simulate buffer diffs being slow, so that they don't complete before
2918 // the next file change occurs.
2919 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2920
2921 // Change the buffer's file on disk, and then wait for the file change
2922 // to be detected by the worktree, so that the buffer starts reloading.
2923 fs.save(
2924 "/dir/file1".as_ref(),
2925 &"the first contents".into(),
2926 Default::default(),
2927 )
2928 .await
2929 .unwrap();
2930 worktree.next_event(cx).await;
2931
2932 cx.executor()
2933 .spawn(cx.executor().simulate_random_delay())
2934 .await;
2935
2936 // Perform a noop edit, causing the buffer's version to increase.
2937 buffer.update(cx, |buffer, cx| {
2938 buffer.edit([(0..0, " ")], None, cx);
2939 buffer.undo(cx);
2940 });
2941
2942 cx.executor().run_until_parked();
2943 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2944 buffer.read_with(cx, |buffer, _| {
2945 let buffer_text = buffer.text();
2946 if buffer_text == on_disk_text {
2947 assert!(
2948 !buffer.is_dirty() && !buffer.has_conflict(),
2949 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
2950 );
2951 }
2952 // If the file change occurred while the buffer was processing the first
2953 // change, the buffer will be in a conflicting state.
2954 else {
2955 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2956 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
2957 }
2958 });
2959}
2960
2961#[gpui::test]
2962async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
2963 init_test(cx);
2964
2965 let fs = FakeFs::new(cx.executor());
2966 fs.insert_tree(
2967 "/dir",
2968 json!({
2969 "file1": "the old contents",
2970 }),
2971 )
2972 .await;
2973
2974 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
2975 let buffer = project
2976 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2977 .await
2978 .unwrap();
2979 buffer.update(cx, |buffer, cx| {
2980 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2981 });
2982
2983 project
2984 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2985 .await
2986 .unwrap();
2987
2988 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2989 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2990}
2991
// "Save as" on an untitled buffer: the buffer gains a file, clears its dirty
// state, is re-detected as Rust from the new ".rs" extension, and a subsequent
// open of that path returns the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts as dirty Plain Text once edited.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Plain Text");
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees().next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After the save settles: the buffer has a file, is clean, and the
    // language was re-detected from the ".rs" extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name().as_ref(), "Rust");
    });

    // Opening the saved path must return the same buffer, not a new one.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3043
// Exercises worktree rescanning against the *real* filesystem: files and
// directories are renamed/deleted on disk, and the worktree must (1) keep
// stable entry ids across renames, (2) retarget open buffers to their new
// paths, and (3) stream updates that bring a remote replica of the worktree
// into an identical state.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the worktree entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            1,
            metadata,
            Box::new(CollabRemoteWorktreeClient(project.read(cx).client())),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    // The local worktree reflects the renames and deletions.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survived the renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files to the new paths; the deleted file's
    // buffer keeps its old path but is flagged as deleted.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert!(!buffer2.read(cx).file().unwrap().is_deleted());
        assert!(!buffer3.read(cx).file().unwrap().is_deleted());
        assert!(!buffer4.read(cx).file().unwrap().is_deleted());
        assert!(buffer5.read(cx).file().unwrap().is_deleted());
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3198
// Renaming a directory through the project must preserve the worktree entry
// ids of the directory and the files inside it, and must not make buffers
// opened under the old path dirty.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, _| project.worktrees().next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees().next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b" via the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3250
3251#[gpui::test]
3252async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3253 init_test(cx);
3254
3255 let fs = FakeFs::new(cx.executor());
3256 fs.insert_tree(
3257 "/dir",
3258 json!({
3259 "a.txt": "a-contents",
3260 "b.txt": "b-contents",
3261 }),
3262 )
3263 .await;
3264
3265 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3266
3267 // Spawn multiple tasks to open paths, repeating some paths.
3268 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3269 (
3270 p.open_local_buffer("/dir/a.txt", cx),
3271 p.open_local_buffer("/dir/b.txt", cx),
3272 p.open_local_buffer("/dir/a.txt", cx),
3273 )
3274 });
3275
3276 let buffer_a_1 = buffer_a_1.await.unwrap();
3277 let buffer_a_2 = buffer_a_2.await.unwrap();
3278 let buffer_b = buffer_b.await.unwrap();
3279 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3280 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3281
3282 // There is only one buffer per path.
3283 let buffer_a_id = buffer_a_1.entity_id();
3284 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3285
3286 // Open the same path again while it is still open.
3287 drop(buffer_a_1);
3288 let buffer_a_3 = project
3289 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3290 .await
3291 .unwrap();
3292
3293 // There's still only one buffer per path.
3294 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3295}
3296
// Tracks the buffer's dirty flag and the exact event sequence it emits across
// edits, saves, undo-style restorations, and on-disk deletions — including the
// case where deleting an already-dirty file must not emit a second
// DirtyChanged event.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Collects every non-Operation event buffer1 emits.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation(_) => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[language::Event::Edited, language::Event::DirtyChanged]
        );
        events.lock().clear();
        buffer.did_save(buffer.version(), buffer.file().unwrap().mtime(), cx);
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::Event::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first edit after a save flips the dirty flag; the second
        // edit emits Edited alone.
        assert_eq!(
            *events.lock(),
            &[
                language::Event::Edited,
                language::Event::DirtyChanged,
                language::Event::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[language::Event::Edited, language::Event::DirtyChanged]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::Event::DirtyChanged,
            language::Event::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Only the file-handle change is reported — no duplicate DirtyChanged.
    assert_eq!(*events.lock(), &[language::Event::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3437
3438#[gpui::test]
3439async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
3440 init_test(cx);
3441
3442 let initial_contents = "aaa\nbbbbb\nc\n";
3443 let fs = FakeFs::new(cx.executor());
3444 fs.insert_tree(
3445 "/dir",
3446 json!({
3447 "the-file": initial_contents,
3448 }),
3449 )
3450 .await;
3451 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3452 let buffer = project
3453 .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
3454 .await
3455 .unwrap();
3456
3457 let anchors = (0..3)
3458 .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
3459 .collect::<Vec<_>>();
3460
3461 // Change the file on disk, adding two new lines of text, and removing
3462 // one line.
3463 buffer.update(cx, |buffer, _| {
3464 assert!(!buffer.is_dirty());
3465 assert!(!buffer.has_conflict());
3466 });
3467 let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
3468 fs.save(
3469 "/dir/the-file".as_ref(),
3470 &new_contents.into(),
3471 LineEnding::Unix,
3472 )
3473 .await
3474 .unwrap();
3475
3476 // Because the buffer was not modified, it is reloaded from disk. Its
3477 // contents are edited according to the diff between the old and new
3478 // file contents.
3479 cx.executor().run_until_parked();
3480 buffer.update(cx, |buffer, _| {
3481 assert_eq!(buffer.text(), new_contents);
3482 assert!(!buffer.is_dirty());
3483 assert!(!buffer.has_conflict());
3484
3485 let anchor_positions = anchors
3486 .iter()
3487 .map(|anchor| anchor.to_point(&*buffer))
3488 .collect::<Vec<_>>();
3489 assert_eq!(
3490 anchor_positions,
3491 [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
3492 );
3493 });
3494
3495 // Modify the buffer
3496 buffer.update(cx, |buffer, cx| {
3497 buffer.edit([(0..0, " ")], None, cx);
3498 assert!(buffer.is_dirty());
3499 assert!(!buffer.has_conflict());
3500 });
3501
3502 // Change the file on disk again, adding blank lines to the beginning.
3503 fs.save(
3504 "/dir/the-file".as_ref(),
3505 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
3506 LineEnding::Unix,
3507 )
3508 .await
3509 .unwrap();
3510
3511 // Because the buffer is modified, it doesn't reload from disk, but is
3512 // marked as having a conflict.
3513 cx.executor().run_until_parked();
3514 buffer.update(cx, |buffer, _| {
3515 assert!(buffer.has_conflict());
3516 });
3517}
3518
3519#[gpui::test]
3520async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3521 init_test(cx);
3522
3523 let fs = FakeFs::new(cx.executor());
3524 fs.insert_tree(
3525 "/dir",
3526 json!({
3527 "file1": "a\nb\nc\n",
3528 "file2": "one\r\ntwo\r\nthree\r\n",
3529 }),
3530 )
3531 .await;
3532
3533 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3534 let buffer1 = project
3535 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3536 .await
3537 .unwrap();
3538 let buffer2 = project
3539 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3540 .await
3541 .unwrap();
3542
3543 buffer1.update(cx, |buffer, _| {
3544 assert_eq!(buffer.text(), "a\nb\nc\n");
3545 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3546 });
3547 buffer2.update(cx, |buffer, _| {
3548 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3549 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3550 });
3551
3552 // Change a file's line endings on disk from unix to windows. The buffer's
3553 // state updates correctly.
3554 fs.save(
3555 "/dir/file1".as_ref(),
3556 &"aaa\nb\nc\n".into(),
3557 LineEnding::Windows,
3558 )
3559 .await
3560 .unwrap();
3561 cx.executor().run_until_parked();
3562 buffer1.update(cx, |buffer, _| {
3563 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3564 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3565 });
3566
3567 // Save a file with windows line endings. The file is written correctly.
3568 buffer2.update(cx, |buffer, cx| {
3569 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3570 });
3571 project
3572 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3573 .await
3574 .unwrap();
3575 assert_eq!(
3576 fs.load("/dir/file2".as_ref()).await.unwrap(),
3577 "one\r\ntwo\r\nthree\r\nfour\r\n",
3578 );
3579}
3580
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Publish one batch of five diagnostics forming two logical groups:
    // - "error 1" (WARNING) plus one supporting hint, and
    // - "error 2" (ERROR) plus two supporting hints.
    // Primaries and hints reference each other through `related_information`;
    // the assertions below verify that they end up sharing a `group_id`.
    let buffer_uri = Uri::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone().into(),
        diagnostics: vec![
            // Primary of group "error 1"; its related info points at the
            // hint's range.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone().into(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1"; points back at the primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone().into(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group "error 2"; references both of its hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone().into(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone().into(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint belonging to "error 2"; points back at the primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone().into(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint belonging to "error 2"; also points back at the primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.into(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    project
        .update(cx, |p, cx| {
            p.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries come back in position order; each carries the group id
    // of its cluster, and exactly one entry per group is marked primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" cluster: both hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" cluster: the primary warning plus its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3822
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // Register a fake Rust language server advertising rename support with
    // `prepareRename` enabled.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp_adapter(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside the name `ONE`). The fake server
    // replies with the symbol's range, which the project converts back into
    // buffer offsets (asserted as 6..9 below).
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let range = response.await.unwrap().unwrap();
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename ONE -> THREE. The fake server returns a workspace
    // edit touching both files; the project applies it and reports the
    // affected buffers as a transaction.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), true, cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path("/dir/one.rs").unwrap().into(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path("/dir/two.rs").unwrap().into(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // Both buffers should appear in the transaction with their edits applied:
    // one.rs (removed from the map first) and then two.rs.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
3956
3957#[gpui::test]
3958async fn test_search(cx: &mut gpui::TestAppContext) {
3959 init_test(cx);
3960
3961 let fs = FakeFs::new(cx.executor());
3962 fs.insert_tree(
3963 "/dir",
3964 json!({
3965 "one.rs": "const ONE: usize = 1;",
3966 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
3967 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
3968 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
3969 }),
3970 )
3971 .await;
3972 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3973 assert_eq!(
3974 search(
3975 &project,
3976 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
3977 cx
3978 )
3979 .await
3980 .unwrap(),
3981 HashMap::from_iter([
3982 ("dir/two.rs".to_string(), vec![6..9]),
3983 ("dir/three.rs".to_string(), vec![37..40])
3984 ])
3985 );
3986
3987 let buffer_4 = project
3988 .update(cx, |project, cx| {
3989 project.open_local_buffer("/dir/four.rs", cx)
3990 })
3991 .await
3992 .unwrap();
3993 buffer_4.update(cx, |buffer, cx| {
3994 let text = "two::TWO";
3995 buffer.edit([(20..28, text), (31..43, text)], None, cx);
3996 });
3997
3998 assert_eq!(
3999 search(
4000 &project,
4001 SearchQuery::text("TWO", false, true, false, Vec::new(), Vec::new()).unwrap(),
4002 cx
4003 )
4004 .await
4005 .unwrap(),
4006 HashMap::from_iter([
4007 ("dir/two.rs".to_string(), vec![6..9]),
4008 ("dir/three.rs".to_string(), vec![37..40]),
4009 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4010 ])
4011 );
4012}
4013
4014#[gpui::test]
4015async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4016 init_test(cx);
4017
4018 let search_query = "file";
4019
4020 let fs = FakeFs::new(cx.executor());
4021 fs.insert_tree(
4022 "/dir",
4023 json!({
4024 "one.rs": r#"// Rust file one"#,
4025 "one.ts": r#"// TypeScript file one"#,
4026 "two.rs": r#"// Rust file two"#,
4027 "two.ts": r#"// TypeScript file two"#,
4028 }),
4029 )
4030 .await;
4031 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4032
4033 assert!(
4034 search(
4035 &project,
4036 SearchQuery::text(
4037 search_query,
4038 false,
4039 true,
4040 false,
4041 vec![PathMatcher::new("*.odd").unwrap()],
4042 Vec::new()
4043 )
4044 .unwrap(),
4045 cx
4046 )
4047 .await
4048 .unwrap()
4049 .is_empty(),
4050 "If no inclusions match, no files should be returned"
4051 );
4052
4053 assert_eq!(
4054 search(
4055 &project,
4056 SearchQuery::text(
4057 search_query,
4058 false,
4059 true,
4060 false,
4061 vec![PathMatcher::new("*.rs").unwrap()],
4062 Vec::new()
4063 )
4064 .unwrap(),
4065 cx
4066 )
4067 .await
4068 .unwrap(),
4069 HashMap::from_iter([
4070 ("dir/one.rs".to_string(), vec![8..12]),
4071 ("dir/two.rs".to_string(), vec![8..12]),
4072 ]),
4073 "Rust only search should give only Rust files"
4074 );
4075
4076 assert_eq!(
4077 search(
4078 &project,
4079 SearchQuery::text(
4080 search_query,
4081 false,
4082 true,
4083 false,
4084 vec![
4085 PathMatcher::new("*.ts").unwrap(),
4086 PathMatcher::new("*.odd").unwrap(),
4087 ],
4088 Vec::new()
4089 ).unwrap(),
4090 cx
4091 )
4092 .await
4093 .unwrap(),
4094 HashMap::from_iter([
4095 ("dir/one.ts".to_string(), vec![14..18]),
4096 ("dir/two.ts".to_string(), vec![14..18]),
4097 ]),
4098 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4099 );
4100
4101 assert_eq!(
4102 search(
4103 &project,
4104 SearchQuery::text(
4105 search_query,
4106 false,
4107 true,
4108 false,
4109 vec![
4110 PathMatcher::new("*.rs").unwrap(),
4111 PathMatcher::new("*.ts").unwrap(),
4112 PathMatcher::new("*.odd").unwrap(),
4113 ],
4114 Vec::new()
4115 ).unwrap(),
4116 cx
4117 )
4118 .await
4119 .unwrap(),
4120 HashMap::from_iter([
4121 ("dir/two.ts".to_string(), vec![14..18]),
4122 ("dir/one.rs".to_string(), vec![8..12]),
4123 ("dir/one.ts".to_string(), vec![14..18]),
4124 ("dir/two.rs".to_string(), vec![8..12]),
4125 ]),
4126 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4127 );
4128}
4129
4130#[gpui::test]
4131async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4132 init_test(cx);
4133
4134 let search_query = "file";
4135
4136 let fs = FakeFs::new(cx.executor());
4137 fs.insert_tree(
4138 "/dir",
4139 json!({
4140 "one.rs": r#"// Rust file one"#,
4141 "one.ts": r#"// TypeScript file one"#,
4142 "two.rs": r#"// Rust file two"#,
4143 "two.ts": r#"// TypeScript file two"#,
4144 }),
4145 )
4146 .await;
4147 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4148
4149 assert_eq!(
4150 search(
4151 &project,
4152 SearchQuery::text(
4153 search_query,
4154 false,
4155 true,
4156 false,
4157 Vec::new(),
4158 vec![PathMatcher::new("*.odd").unwrap()],
4159 )
4160 .unwrap(),
4161 cx
4162 )
4163 .await
4164 .unwrap(),
4165 HashMap::from_iter([
4166 ("dir/one.rs".to_string(), vec![8..12]),
4167 ("dir/one.ts".to_string(), vec![14..18]),
4168 ("dir/two.rs".to_string(), vec![8..12]),
4169 ("dir/two.ts".to_string(), vec![14..18]),
4170 ]),
4171 "If no exclusions match, all files should be returned"
4172 );
4173
4174 assert_eq!(
4175 search(
4176 &project,
4177 SearchQuery::text(
4178 search_query,
4179 false,
4180 true,
4181 false,
4182 Vec::new(),
4183 vec![PathMatcher::new("*.rs").unwrap()],
4184 )
4185 .unwrap(),
4186 cx
4187 )
4188 .await
4189 .unwrap(),
4190 HashMap::from_iter([
4191 ("dir/one.ts".to_string(), vec![14..18]),
4192 ("dir/two.ts".to_string(), vec![14..18]),
4193 ]),
4194 "Rust exclusion search should give only TypeScript files"
4195 );
4196
4197 assert_eq!(
4198 search(
4199 &project,
4200 SearchQuery::text(
4201 search_query,
4202 false,
4203 true,
4204 false,
4205 Vec::new(),
4206 vec![
4207 PathMatcher::new("*.ts").unwrap(),
4208 PathMatcher::new("*.odd").unwrap(),
4209 ],
4210 ).unwrap(),
4211 cx
4212 )
4213 .await
4214 .unwrap(),
4215 HashMap::from_iter([
4216 ("dir/one.rs".to_string(), vec![8..12]),
4217 ("dir/two.rs".to_string(), vec![8..12]),
4218 ]),
4219 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4220 );
4221
4222 assert!(
4223 search(
4224 &project,
4225 SearchQuery::text(
4226 search_query,
4227 false,
4228 true,
4229 false,
4230 Vec::new(),
4231 vec![
4232 PathMatcher::new("*.rs").unwrap(),
4233 PathMatcher::new("*.ts").unwrap(),
4234 PathMatcher::new("*.odd").unwrap(),
4235 ],
4236 ).unwrap(),
4237 cx
4238 )
4239 .await
4240 .unwrap().is_empty(),
4241 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4242 );
4243}
4244
4245#[gpui::test]
4246async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4247 init_test(cx);
4248
4249 let search_query = "file";
4250
4251 let fs = FakeFs::new(cx.executor());
4252 fs.insert_tree(
4253 "/dir",
4254 json!({
4255 "one.rs": r#"// Rust file one"#,
4256 "one.ts": r#"// TypeScript file one"#,
4257 "two.rs": r#"// Rust file two"#,
4258 "two.ts": r#"// TypeScript file two"#,
4259 }),
4260 )
4261 .await;
4262 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4263
4264 assert!(
4265 search(
4266 &project,
4267 SearchQuery::text(
4268 search_query,
4269 false,
4270 true,
4271 false,
4272 vec![PathMatcher::new("*.odd").unwrap()],
4273 vec![PathMatcher::new("*.odd").unwrap()],
4274 )
4275 .unwrap(),
4276 cx
4277 )
4278 .await
4279 .unwrap()
4280 .is_empty(),
4281 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4282 );
4283
4284 assert!(
4285 search(
4286 &project,
4287 SearchQuery::text(
4288 search_query,
4289 false,
4290 true,
4291 false,
4292 vec![PathMatcher::new("*.ts").unwrap()],
4293 vec![PathMatcher::new("*.ts").unwrap()],
4294 ).unwrap(),
4295 cx
4296 )
4297 .await
4298 .unwrap()
4299 .is_empty(),
4300 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4301 );
4302
4303 assert!(
4304 search(
4305 &project,
4306 SearchQuery::text(
4307 search_query,
4308 false,
4309 true,
4310 false,
4311 vec![
4312 PathMatcher::new("*.ts").unwrap(),
4313 PathMatcher::new("*.odd").unwrap()
4314 ],
4315 vec![
4316 PathMatcher::new("*.ts").unwrap(),
4317 PathMatcher::new("*.odd").unwrap()
4318 ],
4319 )
4320 .unwrap(),
4321 cx
4322 )
4323 .await
4324 .unwrap()
4325 .is_empty(),
4326 "Non-matching inclusions and exclusions should not change that."
4327 );
4328
4329 assert_eq!(
4330 search(
4331 &project,
4332 SearchQuery::text(
4333 search_query,
4334 false,
4335 true,
4336 false,
4337 vec![
4338 PathMatcher::new("*.ts").unwrap(),
4339 PathMatcher::new("*.odd").unwrap()
4340 ],
4341 vec![
4342 PathMatcher::new("*.rs").unwrap(),
4343 PathMatcher::new("*.odd").unwrap()
4344 ],
4345 )
4346 .unwrap(),
4347 cx
4348 )
4349 .await
4350 .unwrap(),
4351 HashMap::from_iter([
4352 ("dir/one.ts".to_string(), vec![14..18]),
4353 ("dir/two.ts".to_string(), vec![14..18]),
4354 ]),
4355 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4356 );
4357}
4358
4359#[gpui::test]
4360async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4361 init_test(cx);
4362
4363 let fs = FakeFs::new(cx.executor());
4364 fs.insert_tree(
4365 "/worktree-a",
4366 json!({
4367 "haystack.rs": r#"// NEEDLE"#,
4368 "haystack.ts": r#"// NEEDLE"#,
4369 }),
4370 )
4371 .await;
4372 fs.insert_tree(
4373 "/worktree-b",
4374 json!({
4375 "haystack.rs": r#"// NEEDLE"#,
4376 "haystack.ts": r#"// NEEDLE"#,
4377 }),
4378 )
4379 .await;
4380
4381 let project = Project::test(
4382 fs.clone(),
4383 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4384 cx,
4385 )
4386 .await;
4387
4388 assert_eq!(
4389 search(
4390 &project,
4391 SearchQuery::text(
4392 "NEEDLE",
4393 false,
4394 true,
4395 false,
4396 vec![PathMatcher::new("worktree-a/*.rs").unwrap()],
4397 Vec::new()
4398 )
4399 .unwrap(),
4400 cx
4401 )
4402 .await
4403 .unwrap(),
4404 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4405 "should only return results from included worktree"
4406 );
4407 assert_eq!(
4408 search(
4409 &project,
4410 SearchQuery::text(
4411 "NEEDLE",
4412 false,
4413 true,
4414 false,
4415 vec![PathMatcher::new("worktree-b/*.rs").unwrap()],
4416 Vec::new()
4417 )
4418 .unwrap(),
4419 cx
4420 )
4421 .await
4422 .unwrap(),
4423 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4424 "should only return results from included worktree"
4425 );
4426
4427 assert_eq!(
4428 search(
4429 &project,
4430 SearchQuery::text(
4431 "NEEDLE",
4432 false,
4433 true,
4434 false,
4435 vec![PathMatcher::new("*.ts").unwrap()],
4436 Vec::new()
4437 )
4438 .unwrap(),
4439 cx
4440 )
4441 .await
4442 .unwrap(),
4443 HashMap::from_iter([
4444 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4445 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4446 ]),
4447 "should return results from both worktrees"
4448 );
4449}
4450
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree where `target/` and `node_modules/` are gitignored.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    // With the fourth (include-ignored) argument false, ignored directories
    // are skipped entirely.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, false, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created for each query — presumably to
    // reset any worktree scanning state from the previous search; confirm
    // before relying on this.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(query, false, false, true, Vec::new(), Vec::new()).unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions/exclusions still apply on top of the include-ignored flag.
    let files_to_include = vec![PathMatcher::new("/dir/node_modules/prettier/**").unwrap()];
    let files_to_exclude = vec![PathMatcher::new("*.ts").unwrap()];
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4548
#[test]
fn test_glob_literal_prefix() {
    // The literal prefix is everything before the first path component that
    // contains glob metacharacters.
    let cases = [
        ("**/*.js", ""),
        ("node_modules/**/*.js", "node_modules"),
        ("foo/{bar,baz}.js", "foo"),
        ("foo/bar/baz.js", "foo/bar/baz.js"),
    ];
    for (glob, expected_prefix) in cases {
        assert_eq!(
            glob_literal_prefix(glob),
            expected_prefix,
            "prefix of {:?}",
            glob
        );
    }
}
4556
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // "b.." is a valid entry name — its ".." is part of the name, not a
    // parent-directory component — so creation succeeds.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the "b.." entry was actually created; the rejected paths left no
    // trace on the filesystem.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees().next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4626
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Registers four fake language servers for the "tsx" language and checks
    // that `Project::hover`:
    //   - queries every server that advertises hover capabilities,
    //   - never queries the server without hover capabilities,
    //   - drops `None` responses (ESLintServer) from the merged result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Primary adapter (second argument `true`); the remaining three are
    // registered as additional, non-primary adapters for the same language.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This server advertises *no* hover capability and must never be asked.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                hover_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the language servers for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect one started server per registered adapter and install per-server
    // hover handlers before issuing the hover request.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two return a hover tagged with the server's name so
                // the final assertion can tell the responses apart.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            "ESLintServer" => {
                // Queried, but its empty response must be filtered out.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the hover, then wait for every capable server to receive it.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
4780
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover response whose parts are all empty or whitespace-only must be
    // filtered down to an empty result rather than surfaced to the user.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp_adapter(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake language server.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.ts", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three hover parts that are all effectively empty:
    // empty string, whitespace, and newlines only.
    let mut request_handled =
        fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        });

    // Start the hover first, then await the server-side handler being hit.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
4850
#[gpui::test]
async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
    // Mirror of `test_multiple_language_server_hovers`, but for code actions:
    // `Project::code_actions` must query every server advertising the code
    // action capability, skip the one without it, and drop `None` responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoActionsCapabilitiesServer",
    ];
    // Primary adapter (second argument `true`); the remaining three are
    // registered as additional, non-primary adapters for the same language.
    let mut fake_tsx_language_servers = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        true,
        FakeLspAdapter {
            name: &language_server_names[0],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _a = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[1],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    let _b = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[2],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );
    // This server advertises *no* code action capability and must never be asked.
    let _c = language_registry.register_specific_fake_lsp_adapter(
        "tsx",
        false,
        FakeLspAdapter {
            name: &language_server_names[3],
            capabilities: lsp::ServerCapabilities {
                code_action_provider: None,
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the language servers for its language.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect one started server per registered adapter and install
    // per-server code action handlers before issuing the request.
    let mut servers_with_actions_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = fake_tsx_language_servers.next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_actions_requests.contains_key(new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        let new_server_name = new_server_name.to_string();
        match new_server_name.as_str() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two return an action titled with the server's name so
                // the final assertion can tell the responses apart.
                servers_with_actions_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
                                    lsp::CodeAction {
                                        title: format!("{name} code action"),
                                        ..lsp::CodeAction::default()
                                    },
                                )]))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Queried, but its empty response must be filtered out.
                servers_with_actions_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoActionsCapabilitiesServer" => {
                let _never_handled = new_server
                    .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for code actions server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Start the request, then wait for every capable server to receive it.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..buffer.read(cx).len(), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
        |mut code_actions_request| async move {
            code_actions_request
                .next()
                .await
                .expect("All code actions requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer code action", "TypeScriptServer code action"],
        code_actions_task
            .await
            .into_iter()
            .map(|code_action| code_action.lsp_action.title)
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive code actions responses from all related servers with hover capabilities"
    );
}
5005
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    // Exercises `Project::move_worktree` with every interesting relative move
    // among three single-file worktrees: adjacent swaps in both directions and
    // moves across the full span ([a,b,c] -> [b,c,a] and back). After each
    // move, `visible_worktrees` must report the expected order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Each file is opened as its own (single-entry) worktree.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    // (worktree_a is currently second, worktree_b first)
    project
        .update(cx, |project, cx| {
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    // (worktree_a is currently third, worktree_b first)
    project
        .update(cx, |project, cx| {
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
5203
5204async fn search(
5205 project: &Model<Project>,
5206 query: SearchQuery,
5207 cx: &mut gpui::TestAppContext,
5208) -> Result<HashMap<String, Vec<Range<usize>>>> {
5209 let mut search_rx = project.update(cx, |project, cx| project.search(query, cx));
5210 let mut results = HashMap::default();
5211 while let Some(search_result) = search_rx.next().await {
5212 match search_result {
5213 SearchResult::Buffer { buffer, ranges } => {
5214 results.entry(buffer).or_insert(ranges);
5215 }
5216 SearchResult::LimitReached => {}
5217 }
5218 }
5219 Ok(results
5220 .into_iter()
5221 .map(|(buffer, ranges)| {
5222 buffer.update(cx, |buffer, cx| {
5223 let path = buffer
5224 .file()
5225 .unwrap()
5226 .full_path(cx)
5227 .to_string_lossy()
5228 .to_string();
5229 let ranges = ranges
5230 .into_iter()
5231 .map(|range| range.to_offset(buffer))
5232 .collect::<Vec<_>>();
5233 (path, ranges)
5234 })
5235 })
5236 .collect())
5237}
5238
5239fn init_test(cx: &mut gpui::TestAppContext) {
5240 if std::env::var("RUST_LOG").is_ok() {
5241 env_logger::try_init().ok();
5242 }
5243
5244 cx.update(|cx| {
5245 let settings_store = SettingsStore::test(cx);
5246 cx.set_global(settings_store);
5247 release_channel::init(SemanticVersion::default(), cx);
5248 language::init(cx);
5249 Project::init_settings(cx);
5250 });
5251}
5252
5253fn json_lang() -> Arc<Language> {
5254 Arc::new(Language::new(
5255 LanguageConfig {
5256 name: "JSON".into(),
5257 matcher: LanguageMatcher {
5258 path_suffixes: vec!["json".to_string()],
5259 ..Default::default()
5260 },
5261 ..Default::default()
5262 },
5263 None,
5264 ))
5265}
5266
5267fn js_lang() -> Arc<Language> {
5268 Arc::new(Language::new(
5269 LanguageConfig {
5270 name: Arc::from("JavaScript"),
5271 matcher: LanguageMatcher {
5272 path_suffixes: vec!["js".to_string()],
5273 ..Default::default()
5274 },
5275 ..Default::default()
5276 },
5277 None,
5278 ))
5279}
5280
5281fn rust_lang() -> Arc<Language> {
5282 Arc::new(Language::new(
5283 LanguageConfig {
5284 name: "Rust".into(),
5285 matcher: LanguageMatcher {
5286 path_suffixes: vec!["rs".to_string()],
5287 ..Default::default()
5288 },
5289 ..Default::default()
5290 },
5291 Some(tree_sitter_rust::language()),
5292 ))
5293}
5294
5295fn typescript_lang() -> Arc<Language> {
5296 Arc::new(Language::new(
5297 LanguageConfig {
5298 name: "TypeScript".into(),
5299 matcher: LanguageMatcher {
5300 path_suffixes: vec!["ts".to_string()],
5301 ..Default::default()
5302 },
5303 ..Default::default()
5304 },
5305 Some(tree_sitter_typescript::language_typescript()),
5306 ))
5307}
5308
5309fn tsx_lang() -> Arc<Language> {
5310 Arc::new(Language::new(
5311 LanguageConfig {
5312 name: "tsx".into(),
5313 matcher: LanguageMatcher {
5314 path_suffixes: vec!["tsx".to_string()],
5315 ..Default::default()
5316 },
5317 ..Default::default()
5318 },
5319 Some(tree_sitter_typescript::language_tsx()),
5320 ))
5321}
5322
5323fn get_all_tasks(
5324 project: &Model<Project>,
5325 worktree_id: Option<WorktreeId>,
5326 task_context: &TaskContext,
5327 cx: &mut AppContext,
5328) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
5329 let resolved_tasks = project.update(cx, |project, cx| {
5330 project
5331 .task_inventory()
5332 .read(cx)
5333 .used_and_current_resolved_tasks(None, worktree_id, None, task_context, cx)
5334 });
5335
5336 cx.spawn(|_| async move {
5337 let (mut old, new) = resolved_tasks.await;
5338 old.extend(new);
5339 old
5340 })
5341}