1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use git::diff::assert_hunks;
5use gpui::{AppContext, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
28
29#[gpui::test]
30async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
31 cx.executor().allow_parking();
32
33 let (tx, mut rx) = futures::channel::mpsc::unbounded();
34 let _thread = std::thread::spawn(move || {
35 std::fs::metadata("/tmp").unwrap();
36 std::thread::sleep(Duration::from_millis(1000));
37 tx.unbounded_send(1).unwrap();
38 });
39 rx.next().await.unwrap();
40}
41
42#[gpui::test]
43async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
44 cx.executor().allow_parking();
45
46 let io_task = smol::unblock(move || {
47 println!("sleeping on thread {:?}", std::thread::current().id());
48 std::thread::sleep(Duration::from_millis(10));
49 1
50 });
51
52 let task = cx.foreground_executor().spawn(async move {
53 io_task.await;
54 });
55
56 task.await;
57}
58
59#[cfg(not(windows))]
60#[gpui::test]
61async fn test_symlinks(cx: &mut gpui::TestAppContext) {
62 init_test(cx);
63 cx.executor().allow_parking();
64
65 let dir = temp_tree(json!({
66 "root": {
67 "apple": "",
68 "banana": {
69 "carrot": {
70 "date": "",
71 "endive": "",
72 }
73 },
74 "fennel": {
75 "grape": "",
76 }
77 }
78 }));
79
80 let root_link_path = dir.path().join("root_link");
81 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
82 os::unix::fs::symlink(
83 dir.path().join("root/fennel"),
84 dir.path().join("root/finnochio"),
85 )
86 .unwrap();
87
88 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
89
90 project.update(cx, |project, cx| {
91 let tree = project.worktrees(cx).next().unwrap().read(cx);
92 assert_eq!(tree.file_count(), 5);
93 assert_eq!(
94 tree.inode_for_path("fennel/grape"),
95 tree.inode_for_path("finnochio/grape")
96 );
97 });
98}
99
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies EditorConfig integration:
    // - `.editorconfig` values override `.zed/settings.json`,
    // - a nested `.editorconfig` overrides the root one,
    // - `tab_width` is used when `indent_size` is absent, and
    // - files not matched by any glob fall back to the Zed settings.
    init_test(cx);

    // Build the tree on the real filesystem, then mirror it into a FakeFs.
    let dir = temp_tree(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    // Languages must be registered so each file resolves to a language (and
    // thereby to language-specific settings).
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json is not matched by the .editorconfig glob "*.rs", so it
        // keeps the tab_size from .zed/settings.json.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
189
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies per-directory `.zed` configuration:
    // - `settings.json` in a nested `.zed` directory overrides the root one;
    // - tasks from every `.zed/tasks.json` plus file-based global tasks are
    //   all surfaced, and the ordering reflects recency of scheduling
    //   (asserted against the concrete lists below).
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // Source kind identifying tasks that come from the root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Each file's settings come from the nearest enclosing
            // `.zed/settings.json`: a/a.rs -> root (8), b/b.rs -> b/.zed (2).
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute exactly one task each.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as the most recently scheduled one, and add a
    // global task through the file-based tasks source.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled root task now sorts first, and the new global
    // task (with its env) appears after the worktree tasks.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
383
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end test of language-server lifecycle management:
    // - servers start lazily when a buffer of their language opens;
    // - open/change/save/close notifications go to the right server(s);
    // - renaming a file across languages closes it on the old server and
    //   opens it on the new one (clearing stale diagnostics);
    // - restarting servers reopens all their documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust and JSON servers, each advertising distinct completion
    // trigger characters so we can tell which server configured a buffer.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap()),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared when the buffer
    // changes language below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
781
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies `workspace/didChangeWatchedFiles` support:
    // - ignored directories are only scanned once a server registers a watch
    //   pattern that reaches into them, and
    // - subsequent FS mutations are forwarded to the server iff they match
    //   one of its registered glob patterns.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by the watch below.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: a single file, a glob in a tracked directory,
    // and a glob reaching inside the gitignored `target` directory.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Collect incoming DidChangeWatchedFiles events, sorted by URI for
    // deterministic assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watch alone produces no change events, but does trigger
    // extra directory scans into the ignored path.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
975
976#[gpui::test]
977async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
978 init_test(cx);
979
980 let fs = FakeFs::new(cx.executor());
981 fs.insert_tree(
982 "/dir",
983 json!({
984 "a.rs": "let a = 1;",
985 "b.rs": "let b = 2;"
986 }),
987 )
988 .await;
989
990 let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
991 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
992
993 let buffer_a = project
994 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
995 .await
996 .unwrap();
997 let buffer_b = project
998 .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
999 .await
1000 .unwrap();
1001
1002 lsp_store.update(cx, |lsp_store, cx| {
1003 lsp_store
1004 .update_diagnostics(
1005 LanguageServerId(0),
1006 lsp::PublishDiagnosticsParams {
1007 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1008 version: None,
1009 diagnostics: vec![lsp::Diagnostic {
1010 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1011 severity: Some(lsp::DiagnosticSeverity::ERROR),
1012 message: "error 1".to_string(),
1013 ..Default::default()
1014 }],
1015 },
1016 &[],
1017 cx,
1018 )
1019 .unwrap();
1020 lsp_store
1021 .update_diagnostics(
1022 LanguageServerId(0),
1023 lsp::PublishDiagnosticsParams {
1024 uri: Url::from_file_path("/dir/b.rs").unwrap(),
1025 version: None,
1026 diagnostics: vec![lsp::Diagnostic {
1027 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1028 severity: Some(DiagnosticSeverity::WARNING),
1029 message: "error 2".to_string(),
1030 ..Default::default()
1031 }],
1032 },
1033 &[],
1034 cx,
1035 )
1036 .unwrap();
1037 });
1038
1039 buffer_a.update(cx, |buffer, _| {
1040 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1041 assert_eq!(
1042 chunks
1043 .iter()
1044 .map(|(s, d)| (s.as_str(), *d))
1045 .collect::<Vec<_>>(),
1046 &[
1047 ("let ", None),
1048 ("a", Some(DiagnosticSeverity::ERROR)),
1049 (" = 1;", None),
1050 ]
1051 );
1052 });
1053 buffer_b.update(cx, |buffer, _| {
1054 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1055 assert_eq!(
1056 chunks
1057 .iter()
1058 .map(|(s, d)| (s.as_str(), *d))
1059 .collect::<Vec<_>>(),
1060 &[
1061 ("let ", None),
1062 ("b", Some(DiagnosticSeverity::WARNING)),
1063 (" = 2;", None),
1064 ]
1065 );
1066 });
1067}
1068
1069#[gpui::test]
1070async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1071 init_test(cx);
1072
1073 let fs = FakeFs::new(cx.executor());
1074 fs.insert_tree(
1075 "/root",
1076 json!({
1077 "dir": {
1078 ".git": {
1079 "HEAD": "ref: refs/heads/main",
1080 },
1081 ".gitignore": "b.rs",
1082 "a.rs": "let a = 1;",
1083 "b.rs": "let b = 2;",
1084 },
1085 "other.rs": "let b = c;"
1086 }),
1087 )
1088 .await;
1089
1090 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1091 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1092 let (worktree, _) = project
1093 .update(cx, |project, cx| {
1094 project.find_or_create_worktree("/root/dir", true, cx)
1095 })
1096 .await
1097 .unwrap();
1098 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1099
1100 let (worktree, _) = project
1101 .update(cx, |project, cx| {
1102 project.find_or_create_worktree("/root/other.rs", false, cx)
1103 })
1104 .await
1105 .unwrap();
1106 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1107
1108 let server_id = LanguageServerId(0);
1109 lsp_store.update(cx, |lsp_store, cx| {
1110 lsp_store
1111 .update_diagnostics(
1112 server_id,
1113 lsp::PublishDiagnosticsParams {
1114 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1115 version: None,
1116 diagnostics: vec![lsp::Diagnostic {
1117 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1118 severity: Some(lsp::DiagnosticSeverity::ERROR),
1119 message: "unused variable 'b'".to_string(),
1120 ..Default::default()
1121 }],
1122 },
1123 &[],
1124 cx,
1125 )
1126 .unwrap();
1127 lsp_store
1128 .update_diagnostics(
1129 server_id,
1130 lsp::PublishDiagnosticsParams {
1131 uri: Url::from_file_path("/root/other.rs").unwrap(),
1132 version: None,
1133 diagnostics: vec![lsp::Diagnostic {
1134 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1135 severity: Some(lsp::DiagnosticSeverity::ERROR),
1136 message: "unknown variable 'c'".to_string(),
1137 ..Default::default()
1138 }],
1139 },
1140 &[],
1141 cx,
1142 )
1143 .unwrap();
1144 });
1145
1146 let main_ignored_buffer = project
1147 .update(cx, |project, cx| {
1148 project.open_buffer((main_worktree_id, "b.rs"), cx)
1149 })
1150 .await
1151 .unwrap();
1152 main_ignored_buffer.update(cx, |buffer, _| {
1153 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1154 assert_eq!(
1155 chunks
1156 .iter()
1157 .map(|(s, d)| (s.as_str(), *d))
1158 .collect::<Vec<_>>(),
1159 &[
1160 ("let ", None),
1161 ("b", Some(DiagnosticSeverity::ERROR)),
1162 (" = 2;", None),
1163 ],
1164 "Gigitnored buffers should still get in-buffer diagnostics",
1165 );
1166 });
1167 let other_buffer = project
1168 .update(cx, |project, cx| {
1169 project.open_buffer((other_worktree_id, ""), cx)
1170 })
1171 .await
1172 .unwrap();
1173 other_buffer.update(cx, |buffer, _| {
1174 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1175 assert_eq!(
1176 chunks
1177 .iter()
1178 .map(|(s, d)| (s.as_str(), *d))
1179 .collect::<Vec<_>>(),
1180 &[
1181 ("let b = ", None),
1182 ("c", Some(DiagnosticSeverity::ERROR)),
1183 (";", None),
1184 ],
1185 "Buffers from hidden projects should still get in-buffer diagnostics"
1186 );
1187 });
1188
1189 project.update(cx, |project, cx| {
1190 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1191 assert_eq!(
1192 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1193 vec![(
1194 ProjectPath {
1195 worktree_id: main_worktree_id,
1196 path: Arc::from(Path::new("b.rs")),
1197 },
1198 server_id,
1199 DiagnosticSummary {
1200 error_count: 1,
1201 warning_count: 0,
1202 }
1203 )]
1204 );
1205 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1206 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1207 });
1208}
1209
// Progress notifications using the adapter's registered disk-based
// diagnostics token should be surfaced as DiskBasedDiagnosticsStarted /
// Finished project events, and publishing identical empty diagnostics twice
// should produce only one DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake server will use to mark its work as disk-based diagnostics.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress with the registered token (prefix-matched here via
    // "{token}/0") emits DiskBasedDiagnosticsStarted after the initial
    // inlay-hint refresh event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics while progress is in flight produces a
    // DiagnosticsUpdated event for the affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress emits the matching Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Second identical empty publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1345
// Restarting a language server while its disk-based diagnostics progress is
// still open must not leave the project stuck in the "diagnostics running"
// state: only the new server's progress is tracked.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the restarted server (id 1) is reported as running disk-based
    // diagnostics; the old server's unfinished progress is forgotten.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1433
1434#[gpui::test]
1435async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1436 init_test(cx);
1437
1438 let fs = FakeFs::new(cx.executor());
1439 fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;
1440
1441 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1442
1443 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1444 language_registry.add(rust_lang());
1445 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1446
1447 let (buffer, _) = project
1448 .update(cx, |project, cx| {
1449 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1450 })
1451 .await
1452 .unwrap();
1453
1454 // Publish diagnostics
1455 let fake_server = fake_servers.next().await.unwrap();
1456 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1457 uri: Url::from_file_path("/dir/a.rs").unwrap(),
1458 version: None,
1459 diagnostics: vec![lsp::Diagnostic {
1460 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1461 severity: Some(lsp::DiagnosticSeverity::ERROR),
1462 message: "the message".to_string(),
1463 ..Default::default()
1464 }],
1465 });
1466
1467 cx.executor().run_until_parked();
1468 buffer.update(cx, |buffer, _| {
1469 assert_eq!(
1470 buffer
1471 .snapshot()
1472 .diagnostics_in_range::<_, usize>(0..1, false)
1473 .map(|entry| entry.diagnostic.message.clone())
1474 .collect::<Vec<_>>(),
1475 ["the message".to_string()]
1476 );
1477 });
1478 project.update(cx, |project, cx| {
1479 assert_eq!(
1480 project.diagnostic_summary(false, cx),
1481 DiagnosticSummary {
1482 error_count: 1,
1483 warning_count: 0,
1484 }
1485 );
1486 });
1487
1488 project.update(cx, |project, cx| {
1489 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1490 });
1491
1492 // The diagnostics are cleared.
1493 cx.executor().run_until_parked();
1494 buffer.update(cx, |buffer, _| {
1495 assert_eq!(
1496 buffer
1497 .snapshot()
1498 .diagnostics_in_range::<_, usize>(0..1, false)
1499 .map(|entry| entry.diagnostic.message.clone())
1500 .collect::<Vec<_>>(),
1501 Vec::<String>::new(),
1502 );
1503 });
1504 project.update(cx, |project, cx| {
1505 assert_eq!(
1506 project.diagnostic_summary(false, cx),
1507 DiagnosticSummary {
1508 error_count: 0,
1509 warning_count: 0,
1510 }
1511 );
1512 });
1513}
1514
1515#[gpui::test]
1516async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1517 init_test(cx);
1518
1519 let fs = FakeFs::new(cx.executor());
1520 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1521
1522 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1523 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1524
1525 language_registry.add(rust_lang());
1526 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1527
1528 let (buffer, _handle) = project
1529 .update(cx, |project, cx| {
1530 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1531 })
1532 .await
1533 .unwrap();
1534
1535 // Before restarting the server, report diagnostics with an unknown buffer version.
1536 let fake_server = fake_servers.next().await.unwrap();
1537 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
1538 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1539 version: Some(10000),
1540 diagnostics: Vec::new(),
1541 });
1542 cx.executor().run_until_parked();
1543
1544 project.update(cx, |project, cx| {
1545 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1546 });
1547 let mut fake_server = fake_servers.next().await.unwrap();
1548 let notification = fake_server
1549 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1550 .await
1551 .text_document;
1552 assert_eq!(notification.version, 0);
1553}
1554
1555#[gpui::test]
1556async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1557 init_test(cx);
1558
1559 let progress_token = "the-progress-token";
1560
1561 let fs = FakeFs::new(cx.executor());
1562 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1563
1564 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1565
1566 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1567 language_registry.add(rust_lang());
1568 let mut fake_servers = language_registry.register_fake_lsp(
1569 "Rust",
1570 FakeLspAdapter {
1571 name: "the-language-server",
1572 disk_based_diagnostics_sources: vec!["disk".into()],
1573 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1574 ..Default::default()
1575 },
1576 );
1577
1578 let (buffer, _handle) = project
1579 .update(cx, |project, cx| {
1580 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1581 })
1582 .await
1583 .unwrap();
1584
1585 // Simulate diagnostics starting to update.
1586 let mut fake_server = fake_servers.next().await.unwrap();
1587 fake_server
1588 .start_progress_with(
1589 "another-token",
1590 lsp::WorkDoneProgressBegin {
1591 cancellable: Some(false),
1592 ..Default::default()
1593 },
1594 )
1595 .await;
1596 fake_server
1597 .start_progress_with(
1598 progress_token,
1599 lsp::WorkDoneProgressBegin {
1600 cancellable: Some(true),
1601 ..Default::default()
1602 },
1603 )
1604 .await;
1605 cx.executor().run_until_parked();
1606
1607 project.update(cx, |project, cx| {
1608 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1609 });
1610
1611 let cancel_notification = fake_server
1612 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1613 .await;
1614 assert_eq!(
1615 cancel_notification.token,
1616 NumberOrString::String(progress_token.into())
1617 );
1618}
1619
1620#[gpui::test]
1621async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1622 init_test(cx);
1623
1624 let fs = FakeFs::new(cx.executor());
1625 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1626 .await;
1627
1628 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1629 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1630
1631 let mut fake_rust_servers = language_registry.register_fake_lsp(
1632 "Rust",
1633 FakeLspAdapter {
1634 name: "rust-lsp",
1635 ..Default::default()
1636 },
1637 );
1638 let mut fake_js_servers = language_registry.register_fake_lsp(
1639 "JavaScript",
1640 FakeLspAdapter {
1641 name: "js-lsp",
1642 ..Default::default()
1643 },
1644 );
1645 language_registry.add(rust_lang());
1646 language_registry.add(js_lang());
1647
1648 let _rs_buffer = project
1649 .update(cx, |project, cx| {
1650 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1651 })
1652 .await
1653 .unwrap();
1654 let _js_buffer = project
1655 .update(cx, |project, cx| {
1656 project.open_local_buffer_with_lsp("/dir/b.js", cx)
1657 })
1658 .await
1659 .unwrap();
1660
1661 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1662 assert_eq!(
1663 fake_rust_server_1
1664 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1665 .await
1666 .text_document
1667 .uri
1668 .as_str(),
1669 "file:///dir/a.rs"
1670 );
1671
1672 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1673 assert_eq!(
1674 fake_js_server
1675 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1676 .await
1677 .text_document
1678 .uri
1679 .as_str(),
1680 "file:///dir/b.js"
1681 );
1682
1683 // Disable Rust language server, ensuring only that server gets stopped.
1684 cx.update(|cx| {
1685 SettingsStore::update_global(cx, |settings, cx| {
1686 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1687 settings.languages.insert(
1688 "Rust".into(),
1689 LanguageSettingsContent {
1690 enable_language_server: Some(false),
1691 ..Default::default()
1692 },
1693 );
1694 });
1695 })
1696 });
1697 fake_rust_server_1
1698 .receive_notification::<lsp::notification::Exit>()
1699 .await;
1700
1701 // Enable Rust and disable JavaScript language servers, ensuring that the
1702 // former gets started again and that the latter stops.
1703 cx.update(|cx| {
1704 SettingsStore::update_global(cx, |settings, cx| {
1705 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1706 settings.languages.insert(
1707 LanguageName::new("Rust"),
1708 LanguageSettingsContent {
1709 enable_language_server: Some(true),
1710 ..Default::default()
1711 },
1712 );
1713 settings.languages.insert(
1714 LanguageName::new("JavaScript"),
1715 LanguageSettingsContent {
1716 enable_language_server: Some(false),
1717 ..Default::default()
1718 },
1719 );
1720 });
1721 })
1722 });
1723 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1724 assert_eq!(
1725 fake_rust_server_2
1726 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1727 .await
1728 .text_document
1729 .uri
1730 .as_str(),
1731 "file:///dir/a.rs"
1732 );
1733 fake_js_server
1734 .receive_notification::<lsp::notification::Exit>()
1735 .await;
1736}
1737
// Diagnostics published against an older buffer version must be translated
// through the edits made since that version, so they land on the current
// text. Also covers overlapping diagnostics and out-of-order publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Registering the buffer triggers didOpen on the fake server.
    let _handle = lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows 0 and 1 in the published version map to rows 2-4 now that two
        // newlines were inserted at the top.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query clips the highlighted chunks at its boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning range is yielded before the nested error range.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // Where the ranges overlap ("A"), the ERROR severity wins; the rest of
        // the warning range still renders as WARNING.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both diagnostics end up on the edited text: 'A' after the indent and
        // signature edits (row 2, col 21) and 'BB' spanning the "xxx" insert
        // (row 3, cols 9-14). Entries are sorted by position; group ids keep
        // incrementing across publishes.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2022
2023#[gpui::test]
2024async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2025 init_test(cx);
2026
2027 let text = concat!(
2028 "let one = ;\n", //
2029 "let two = \n",
2030 "let three = 3;\n",
2031 );
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2035
2036 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2037 let buffer = project
2038 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2039 .await
2040 .unwrap();
2041
2042 project.update(cx, |project, cx| {
2043 project.lsp_store.update(cx, |lsp_store, cx| {
2044 lsp_store
2045 .update_diagnostic_entries(
2046 LanguageServerId(0),
2047 PathBuf::from("/dir/a.rs"),
2048 None,
2049 vec![
2050 DiagnosticEntry {
2051 range: Unclipped(PointUtf16::new(0, 10))
2052 ..Unclipped(PointUtf16::new(0, 10)),
2053 diagnostic: Diagnostic {
2054 severity: DiagnosticSeverity::ERROR,
2055 message: "syntax error 1".to_string(),
2056 ..Default::default()
2057 },
2058 },
2059 DiagnosticEntry {
2060 range: Unclipped(PointUtf16::new(1, 10))
2061 ..Unclipped(PointUtf16::new(1, 10)),
2062 diagnostic: Diagnostic {
2063 severity: DiagnosticSeverity::ERROR,
2064 message: "syntax error 2".to_string(),
2065 ..Default::default()
2066 },
2067 },
2068 ],
2069 cx,
2070 )
2071 .unwrap();
2072 })
2073 });
2074
2075 // An empty range is extended forward to include the following character.
2076 // At the end of a line, an empty range is extended backward to include
2077 // the preceding character.
2078 buffer.update(cx, |buffer, _| {
2079 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2080 assert_eq!(
2081 chunks
2082 .iter()
2083 .map(|(s, d)| (s.as_str(), *d))
2084 .collect::<Vec<_>>(),
2085 &[
2086 ("let one = ", None),
2087 (";", Some(DiagnosticSeverity::ERROR)),
2088 ("\nlet two =", None),
2089 (" ", Some(DiagnosticSeverity::ERROR)),
2090 ("\nlet three = 3;\n", None)
2091 ]
2092 );
2093 });
2094}
2095
2096#[gpui::test]
2097async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2098 init_test(cx);
2099
2100 let fs = FakeFs::new(cx.executor());
2101 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2102 .await;
2103
2104 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2105 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2106
2107 lsp_store.update(cx, |lsp_store, cx| {
2108 lsp_store
2109 .update_diagnostic_entries(
2110 LanguageServerId(0),
2111 Path::new("/dir/a.rs").to_owned(),
2112 None,
2113 vec![DiagnosticEntry {
2114 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2115 diagnostic: Diagnostic {
2116 severity: DiagnosticSeverity::ERROR,
2117 is_primary: true,
2118 message: "syntax error a1".to_string(),
2119 ..Default::default()
2120 },
2121 }],
2122 cx,
2123 )
2124 .unwrap();
2125 lsp_store
2126 .update_diagnostic_entries(
2127 LanguageServerId(1),
2128 Path::new("/dir/a.rs").to_owned(),
2129 None,
2130 vec![DiagnosticEntry {
2131 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2132 diagnostic: Diagnostic {
2133 severity: DiagnosticSeverity::ERROR,
2134 is_primary: true,
2135 message: "syntax error b1".to_string(),
2136 ..Default::default()
2137 },
2138 }],
2139 cx,
2140 )
2141 .unwrap();
2142
2143 assert_eq!(
2144 lsp_store.diagnostic_summary(false, cx),
2145 DiagnosticSummary {
2146 error_count: 2,
2147 warning_count: 0,
2148 }
2149 );
2150 });
2151}
2152
// Verifies that `edits_from_lsp` can apply edits a language server computed
// against an older version of the buffer: the stale edits are adjusted to
// account for user edits made after that version was sent to the server.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Remember the document version the server observed on open; the edits
    // below are tagged with this (soon-to-be-stale) version.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The server's positions below refer to `lsp_document_version`, i.e. the
    // buffer as it was before the three edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's edits while
    // landing the server's changes in the right (shifted) locations.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2307
// Verifies that a large LSP diff which effectively rewrites the whole file is
// minimized by `edits_from_lsp` down to the small set of ranges that actually
// changed, instead of being applied verbatim.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only two minimal edits should survive: the import rewrite and the
        // deletion of the now-redundant second use statement.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2416
// Verifies that `edits_from_lsp` tolerates malformed server edits: edits sent
// out of order, ranges whose start comes after their end, and ranges that
// point past the end of the file.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (99, 0) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the same two minimal edits emerge as in
        // the well-formed adjacent-lines test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2521
2522fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2523 buffer: &Buffer,
2524 range: Range<T>,
2525) -> Vec<(String, Option<DiagnosticSeverity>)> {
2526 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2527 for chunk in buffer.snapshot().chunks(range, true) {
2528 if chunks.last().map_or(false, |prev_chunk| {
2529 prev_chunk.1 == chunk.diagnostic_severity
2530 }) {
2531 chunks.last_mut().unwrap().0.push_str(chunk.text);
2532 } else {
2533 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2534 }
2535 }
2536 chunks
2537}
2538
// Verifies go-to-definition into a file outside the project: the target file
// is added as an invisible worktree, which is removed again once the last
// reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs exists on disk but is not opened yet.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // The fake server resolves the definition to a location in a.rs,
    // outside the project's visible worktree.
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible (false) worktree alongside the
        // visible (true) b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        // Dropping the definition releases the last handle keeping the
        // invisible worktree alive.
        drop(definition);
    });
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    // Helper: list each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2634
// Verifies that completion items that provide no explicit text-edit range are
// given a sensible replacement range inferred from the text around the cursor
// (the word being completed, or the enclosing string contents).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Case 1: completing the word "fqn" at the end of the line. The item has
    // only `insert_text`, no edit range.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range covers the partial word "fqn".
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Case 2: completing inside a string literal; the inferred range covers
    // the last path segment ("cmp") but not the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2726
// Verifies that carriage returns in a completion item's insert text ("\r" and
// "\r\n") are normalized to plain "\n" in the resulting completion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert text mixes bare "\r" and "\r\n" line endings.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    // Both CR variants have been converted to a single LF.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2787
// Verifies the command-based code-action flow: when resolving an action
// yields a command rather than edits, the command is executed, and any
// `workspace/applyEdit` request the server sends while executing it is
// collected into the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2922
2923#[gpui::test(iterations = 10)]
2924async fn test_save_file(cx: &mut gpui::TestAppContext) {
2925 init_test(cx);
2926
2927 let fs = FakeFs::new(cx.executor());
2928 fs.insert_tree(
2929 "/dir",
2930 json!({
2931 "file1": "the old contents",
2932 }),
2933 )
2934 .await;
2935
2936 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2937 let buffer = project
2938 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2939 .await
2940 .unwrap();
2941 buffer.update(cx, |buffer, cx| {
2942 assert_eq!(buffer.text(), "the old contents");
2943 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2944 });
2945
2946 project
2947 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2948 .await
2949 .unwrap();
2950
2951 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2952 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2953}
2954
2955#[gpui::test(iterations = 30)]
2956async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2957 init_test(cx);
2958
2959 let fs = FakeFs::new(cx.executor().clone());
2960 fs.insert_tree(
2961 "/dir",
2962 json!({
2963 "file1": "the original contents",
2964 }),
2965 )
2966 .await;
2967
2968 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2969 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2970 let buffer = project
2971 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2972 .await
2973 .unwrap();
2974
2975 // Simulate buffer diffs being slow, so that they don't complete before
2976 // the next file change occurs.
2977 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2978
2979 // Change the buffer's file on disk, and then wait for the file change
2980 // to be detected by the worktree, so that the buffer starts reloading.
2981 fs.save(
2982 "/dir/file1".as_ref(),
2983 &"the first contents".into(),
2984 Default::default(),
2985 )
2986 .await
2987 .unwrap();
2988 worktree.next_event(cx).await;
2989
2990 // Change the buffer's file again. Depending on the random seed, the
2991 // previous file change may still be in progress.
2992 fs.save(
2993 "/dir/file1".as_ref(),
2994 &"the second contents".into(),
2995 Default::default(),
2996 )
2997 .await
2998 .unwrap();
2999 worktree.next_event(cx).await;
3000
3001 cx.executor().run_until_parked();
3002 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3003 buffer.read_with(cx, |buffer, _| {
3004 assert_eq!(buffer.text(), on_disk_text);
3005 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3006 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3007 });
3008}
3009
3010#[gpui::test(iterations = 30)]
3011async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3012 init_test(cx);
3013
3014 let fs = FakeFs::new(cx.executor().clone());
3015 fs.insert_tree(
3016 "/dir",
3017 json!({
3018 "file1": "the original contents",
3019 }),
3020 )
3021 .await;
3022
3023 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3024 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3025 let buffer = project
3026 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3027 .await
3028 .unwrap();
3029
3030 // Simulate buffer diffs being slow, so that they don't complete before
3031 // the next file change occurs.
3032 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3033
3034 // Change the buffer's file on disk, and then wait for the file change
3035 // to be detected by the worktree, so that the buffer starts reloading.
3036 fs.save(
3037 "/dir/file1".as_ref(),
3038 &"the first contents".into(),
3039 Default::default(),
3040 )
3041 .await
3042 .unwrap();
3043 worktree.next_event(cx).await;
3044
3045 cx.executor()
3046 .spawn(cx.executor().simulate_random_delay())
3047 .await;
3048
3049 // Perform a noop edit, causing the buffer's version to increase.
3050 buffer.update(cx, |buffer, cx| {
3051 buffer.edit([(0..0, " ")], None, cx);
3052 buffer.undo(cx);
3053 });
3054
3055 cx.executor().run_until_parked();
3056 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3057 buffer.read_with(cx, |buffer, _| {
3058 let buffer_text = buffer.text();
3059 if buffer_text == on_disk_text {
3060 assert!(
3061 !buffer.is_dirty() && !buffer.has_conflict(),
3062 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3063 );
3064 }
3065 // If the file change occurred while the buffer was processing the first
3066 // change, the buffer will be in a conflicting state.
3067 else {
3068 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3069 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3070 }
3071 });
3072}
3073
3074#[gpui::test]
3075async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3076 init_test(cx);
3077
3078 let fs = FakeFs::new(cx.executor());
3079 fs.insert_tree(
3080 "/dir",
3081 json!({
3082 "file1": "the old contents",
3083 }),
3084 )
3085 .await;
3086
3087 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3088 let buffer = project
3089 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3090 .await
3091 .unwrap();
3092 buffer.update(cx, |buffer, cx| {
3093 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3094 });
3095
3096 project
3097 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3098 .await
3099 .unwrap();
3100
3101 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3102 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3103}
3104
3105#[gpui::test]
3106async fn test_save_as(cx: &mut gpui::TestAppContext) {
3107 init_test(cx);
3108
3109 let fs = FakeFs::new(cx.executor());
3110 fs.insert_tree("/dir", json!({})).await;
3111
3112 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3113
3114 let languages = project.update(cx, |project, _| project.languages().clone());
3115 languages.add(rust_lang());
3116
3117 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3118 buffer.update(cx, |buffer, cx| {
3119 buffer.edit([(0..0, "abc")], None, cx);
3120 assert!(buffer.is_dirty());
3121 assert!(!buffer.has_conflict());
3122 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3123 });
3124 project
3125 .update(cx, |project, cx| {
3126 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3127 let path = ProjectPath {
3128 worktree_id,
3129 path: Arc::from(Path::new("file1.rs")),
3130 };
3131 project.save_buffer_as(buffer.clone(), path, cx)
3132 })
3133 .await
3134 .unwrap();
3135 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3136
3137 cx.executor().run_until_parked();
3138 buffer.update(cx, |buffer, cx| {
3139 assert_eq!(
3140 buffer.file().unwrap().full_path(cx),
3141 Path::new("dir/file1.rs")
3142 );
3143 assert!(!buffer.is_dirty());
3144 assert!(!buffer.has_conflict());
3145 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3146 });
3147
3148 let opened_buffer = project
3149 .update(cx, |project, cx| {
3150 project.open_local_buffer("/dir/file1.rs", cx)
3151 })
3152 .await
3153 .unwrap();
3154 assert_eq!(opened_buffer, buffer);
3155}
3156
// Verifies that renaming and deleting files/directories on the real file
// system preserves worktree entry ids and updates open buffers' paths, and
// that the resulting worktree updates replicate a remote copy into the same
// state as the local worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // onto the remote copy below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive both the direct rename and the parent-dir rename.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        // The deleted file keeps its last known path...
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        // ...but its disk state reflects the deletion.
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3316
3317#[gpui::test(iterations = 10)]
3318async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
3319 init_test(cx);
3320
3321 let fs = FakeFs::new(cx.executor());
3322 fs.insert_tree(
3323 "/dir",
3324 json!({
3325 "a": {
3326 "file1": "",
3327 }
3328 }),
3329 )
3330 .await;
3331
3332 let project = Project::test(fs, [Path::new("/dir")], cx).await;
3333 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
3334 let tree_id = tree.update(cx, |tree, _| tree.id());
3335
3336 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
3337 project.update(cx, |project, cx| {
3338 let tree = project.worktrees(cx).next().unwrap();
3339 tree.read(cx)
3340 .entry_for_path(path)
3341 .unwrap_or_else(|| panic!("no entry for path {}", path))
3342 .id
3343 })
3344 };
3345
3346 let dir_id = id_for_path("a", cx);
3347 let file_id = id_for_path("a/file1", cx);
3348 let buffer = project
3349 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
3350 .await
3351 .unwrap();
3352 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3353
3354 project
3355 .update(cx, |project, cx| {
3356 project.rename_entry(dir_id, Path::new("b"), cx)
3357 })
3358 .unwrap()
3359 .await
3360 .to_included()
3361 .unwrap();
3362 cx.executor().run_until_parked();
3363
3364 assert_eq!(id_for_path("b", cx), dir_id);
3365 assert_eq!(id_for_path("b/file1", cx), file_id);
3366 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
3367}
3368
3369#[gpui::test]
3370async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3371 init_test(cx);
3372
3373 let fs = FakeFs::new(cx.executor());
3374 fs.insert_tree(
3375 "/dir",
3376 json!({
3377 "a.txt": "a-contents",
3378 "b.txt": "b-contents",
3379 }),
3380 )
3381 .await;
3382
3383 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3384
3385 // Spawn multiple tasks to open paths, repeating some paths.
3386 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3387 (
3388 p.open_local_buffer("/dir/a.txt", cx),
3389 p.open_local_buffer("/dir/b.txt", cx),
3390 p.open_local_buffer("/dir/a.txt", cx),
3391 )
3392 });
3393
3394 let buffer_a_1 = buffer_a_1.await.unwrap();
3395 let buffer_a_2 = buffer_a_2.await.unwrap();
3396 let buffer_b = buffer_b.await.unwrap();
3397 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3398 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3399
3400 // There is only one buffer per path.
3401 let buffer_a_id = buffer_a_1.entity_id();
3402 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3403
3404 // Open the same path again while it is still open.
3405 drop(buffer_a_1);
3406 let buffer_a_3 = project
3407 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3408 .await
3409 .unwrap();
3410
3411 // There's still only one buffer per path.
3412 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3413}
3414
3415#[gpui::test]
3416async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
3417 init_test(cx);
3418
3419 let fs = FakeFs::new(cx.executor());
3420 fs.insert_tree(
3421 "/dir",
3422 json!({
3423 "file1": "abc",
3424 "file2": "def",
3425 "file3": "ghi",
3426 }),
3427 )
3428 .await;
3429
3430 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3431
3432 let buffer1 = project
3433 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3434 .await
3435 .unwrap();
3436 let events = Arc::new(Mutex::new(Vec::new()));
3437
3438 // initially, the buffer isn't dirty.
3439 buffer1.update(cx, |buffer, cx| {
3440 cx.subscribe(&buffer1, {
3441 let events = events.clone();
3442 move |_, _, event, _| match event {
3443 BufferEvent::Operation { .. } => {}
3444 _ => events.lock().push(event.clone()),
3445 }
3446 })
3447 .detach();
3448
3449 assert!(!buffer.is_dirty());
3450 assert!(events.lock().is_empty());
3451
3452 buffer.edit([(1..2, "")], None, cx);
3453 });
3454
3455 // after the first edit, the buffer is dirty, and emits a dirtied event.
3456 buffer1.update(cx, |buffer, cx| {
3457 assert!(buffer.text() == "ac");
3458 assert!(buffer.is_dirty());
3459 assert_eq!(
3460 *events.lock(),
3461 &[
3462 language::BufferEvent::Edited,
3463 language::BufferEvent::DirtyChanged
3464 ]
3465 );
3466 events.lock().clear();
3467 buffer.did_save(
3468 buffer.version(),
3469 buffer.file().unwrap().disk_state().mtime(),
3470 cx,
3471 );
3472 });
3473
3474 // after saving, the buffer is not dirty, and emits a saved event.
3475 buffer1.update(cx, |buffer, cx| {
3476 assert!(!buffer.is_dirty());
3477 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
3478 events.lock().clear();
3479
3480 buffer.edit([(1..1, "B")], None, cx);
3481 buffer.edit([(2..2, "D")], None, cx);
3482 });
3483
3484 // after editing again, the buffer is dirty, and emits another dirty event.
3485 buffer1.update(cx, |buffer, cx| {
3486 assert!(buffer.text() == "aBDc");
3487 assert!(buffer.is_dirty());
3488 assert_eq!(
3489 *events.lock(),
3490 &[
3491 language::BufferEvent::Edited,
3492 language::BufferEvent::DirtyChanged,
3493 language::BufferEvent::Edited,
3494 ],
3495 );
3496 events.lock().clear();
3497
3498 // After restoring the buffer to its previously-saved state,
3499 // the buffer is not considered dirty anymore.
3500 buffer.edit([(1..3, "")], None, cx);
3501 assert!(buffer.text() == "ac");
3502 assert!(!buffer.is_dirty());
3503 });
3504
3505 assert_eq!(
3506 *events.lock(),
3507 &[
3508 language::BufferEvent::Edited,
3509 language::BufferEvent::DirtyChanged
3510 ]
3511 );
3512
3513 // When a file is deleted, the buffer is considered dirty.
3514 let events = Arc::new(Mutex::new(Vec::new()));
3515 let buffer2 = project
3516 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3517 .await
3518 .unwrap();
3519 buffer2.update(cx, |_, cx| {
3520 cx.subscribe(&buffer2, {
3521 let events = events.clone();
3522 move |_, _, event, _| events.lock().push(event.clone())
3523 })
3524 .detach();
3525 });
3526
3527 fs.remove_file("/dir/file2".as_ref(), Default::default())
3528 .await
3529 .unwrap();
3530 cx.executor().run_until_parked();
3531 buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
3532 assert_eq!(
3533 *events.lock(),
3534 &[
3535 language::BufferEvent::DirtyChanged,
3536 language::BufferEvent::FileHandleChanged
3537 ]
3538 );
3539
3540 // When a file is already dirty when deleted, we don't emit a Dirtied event.
3541 let events = Arc::new(Mutex::new(Vec::new()));
3542 let buffer3 = project
3543 .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
3544 .await
3545 .unwrap();
3546 buffer3.update(cx, |_, cx| {
3547 cx.subscribe(&buffer3, {
3548 let events = events.clone();
3549 move |_, _, event, _| events.lock().push(event.clone())
3550 })
3551 .detach();
3552 });
3553
3554 buffer3.update(cx, |buffer, cx| {
3555 buffer.edit([(0..0, "x")], None, cx);
3556 });
3557 events.lock().clear();
3558 fs.remove_file("/dir/file3".as_ref(), Default::default())
3559 .await
3560 .unwrap();
3561 cx.executor().run_until_parked();
3562 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
3563 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
3564}
3565
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to the backing file changing on
    // disk: a clean buffer silently reloads (preserving anchors via a diff),
    // while a dirty buffer keeps its edits and flags a conflict instead.
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Place an anchor at column 1 of each of the first three rows so we can
    // check that anchors track their content across the reload.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved with the lines they were attached to: "aaa" is
        // now row 1, "bbbbb" is row 3; the anchor on the deleted "c" line
        // landed at the end of the new content.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3646
3647#[gpui::test]
3648async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3649 init_test(cx);
3650
3651 let fs = FakeFs::new(cx.executor());
3652 fs.insert_tree(
3653 "/dir",
3654 json!({
3655 "file1": "a\nb\nc\n",
3656 "file2": "one\r\ntwo\r\nthree\r\n",
3657 }),
3658 )
3659 .await;
3660
3661 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3662 let buffer1 = project
3663 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3664 .await
3665 .unwrap();
3666 let buffer2 = project
3667 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3668 .await
3669 .unwrap();
3670
3671 buffer1.update(cx, |buffer, _| {
3672 assert_eq!(buffer.text(), "a\nb\nc\n");
3673 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3674 });
3675 buffer2.update(cx, |buffer, _| {
3676 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3677 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3678 });
3679
3680 // Change a file's line endings on disk from unix to windows. The buffer's
3681 // state updates correctly.
3682 fs.save(
3683 "/dir/file1".as_ref(),
3684 &"aaa\nb\nc\n".into(),
3685 LineEnding::Windows,
3686 )
3687 .await
3688 .unwrap();
3689 cx.executor().run_until_parked();
3690 buffer1.update(cx, |buffer, _| {
3691 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3692 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3693 });
3694
3695 // Save a file with windows line endings. The file is written correctly.
3696 buffer2.update(cx, |buffer, cx| {
3697 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3698 });
3699 project
3700 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3701 .await
3702 .unwrap();
3703 assert_eq!(
3704 fs.load("/dir/file2".as_ref()).await.unwrap(),
3705 "one\r\ntwo\r\nthree\r\nfour\r\n",
3706 );
3707}
3708
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes LSP diagnostics whose `related_information` entries point at
    // each other, and verifies that primary diagnostics and their hints are
    // grouped together (shared `group_id`, one `is_primary` per group).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    // Two logical groups: a WARNING ("error 1") with one HINT, and an ERROR
    // ("error 2") with two HINTs. Hints reference their primary (and vice
    // versa) through `related_information`.
    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, in buffer order: "error 2"'s hints share group 0 with
    // their ERROR primary; "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching group 0 returns the ERROR and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Fetching group 1 returns the WARNING and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3951
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-rename handshake with a language server that
    // registers `willRename`/`didRename` file-operation capabilities: the
    // server is consulted before the rename (and may return a workspace
    // edit) and notified after it completes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers: .rs files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it must first round-trip through the server's
    // WillRenameFiles handler below before completing.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will answer WillRenameFiles with; it targets an
    // unrelated file to prove the edit itself is applied/propagated.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set from inside the request handler so we can assert it ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename resolves, the server must receive DidRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4080
4081#[gpui::test]
4082async fn test_rename(cx: &mut gpui::TestAppContext) {
4083 // hi
4084 init_test(cx);
4085
4086 let fs = FakeFs::new(cx.executor());
4087 fs.insert_tree(
4088 "/dir",
4089 json!({
4090 "one.rs": "const ONE: usize = 1;",
4091 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
4092 }),
4093 )
4094 .await;
4095
4096 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4097
4098 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4099 language_registry.add(rust_lang());
4100 let mut fake_servers = language_registry.register_fake_lsp(
4101 "Rust",
4102 FakeLspAdapter {
4103 capabilities: lsp::ServerCapabilities {
4104 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
4105 prepare_provider: Some(true),
4106 work_done_progress_options: Default::default(),
4107 })),
4108 ..Default::default()
4109 },
4110 ..Default::default()
4111 },
4112 );
4113
4114 let (buffer, _handle) = project
4115 .update(cx, |project, cx| {
4116 project.open_local_buffer_with_lsp("/dir/one.rs", cx)
4117 })
4118 .await
4119 .unwrap();
4120
4121 let fake_server = fake_servers.next().await.unwrap();
4122
4123 let response = project.update(cx, |project, cx| {
4124 project.prepare_rename(buffer.clone(), 7, cx)
4125 });
4126 fake_server
4127 .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
4128 assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
4129 assert_eq!(params.position, lsp::Position::new(0, 7));
4130 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
4131 lsp::Position::new(0, 6),
4132 lsp::Position::new(0, 9),
4133 ))))
4134 })
4135 .next()
4136 .await
4137 .unwrap();
4138 let range = response.await.unwrap().unwrap();
4139 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
4140 assert_eq!(range, 6..9);
4141
4142 let response = project.update(cx, |project, cx| {
4143 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
4144 });
4145 fake_server
4146 .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
4147 assert_eq!(
4148 params.text_document_position.text_document.uri.as_str(),
4149 "file:///dir/one.rs"
4150 );
4151 assert_eq!(
4152 params.text_document_position.position,
4153 lsp::Position::new(0, 7)
4154 );
4155 assert_eq!(params.new_name, "THREE");
4156 Ok(Some(lsp::WorkspaceEdit {
4157 changes: Some(
4158 [
4159 (
4160 lsp::Url::from_file_path("/dir/one.rs").unwrap(),
4161 vec![lsp::TextEdit::new(
4162 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
4163 "THREE".to_string(),
4164 )],
4165 ),
4166 (
4167 lsp::Url::from_file_path("/dir/two.rs").unwrap(),
4168 vec![
4169 lsp::TextEdit::new(
4170 lsp::Range::new(
4171 lsp::Position::new(0, 24),
4172 lsp::Position::new(0, 27),
4173 ),
4174 "THREE".to_string(),
4175 ),
4176 lsp::TextEdit::new(
4177 lsp::Range::new(
4178 lsp::Position::new(0, 35),
4179 lsp::Position::new(0, 38),
4180 ),
4181 "THREE".to_string(),
4182 ),
4183 ],
4184 ),
4185 ]
4186 .into_iter()
4187 .collect(),
4188 ),
4189 ..Default::default()
4190 }))
4191 })
4192 .next()
4193 .await
4194 .unwrap();
4195 let mut transaction = response.await.unwrap().0;
4196 assert_eq!(transaction.len(), 2);
4197 assert_eq!(
4198 transaction
4199 .remove_entry(&buffer)
4200 .unwrap()
4201 .0
4202 .update(cx, |buffer, _| buffer.text()),
4203 "const THREE: usize = 1;"
4204 );
4205 assert_eq!(
4206 transaction
4207 .into_keys()
4208 .next()
4209 .unwrap()
4210 .update(cx, |buffer, _| buffer.text()),
4211 "const TWO: usize = one::THREE + one::THREE;"
4212 );
4213}
4214
4215#[gpui::test]
4216async fn test_search(cx: &mut gpui::TestAppContext) {
4217 init_test(cx);
4218
4219 let fs = FakeFs::new(cx.executor());
4220 fs.insert_tree(
4221 "/dir",
4222 json!({
4223 "one.rs": "const ONE: usize = 1;",
4224 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4225 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4226 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4227 }),
4228 )
4229 .await;
4230 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4231 assert_eq!(
4232 search(
4233 &project,
4234 SearchQuery::text(
4235 "TWO",
4236 false,
4237 true,
4238 false,
4239 Default::default(),
4240 Default::default(),
4241 None
4242 )
4243 .unwrap(),
4244 cx
4245 )
4246 .await
4247 .unwrap(),
4248 HashMap::from_iter([
4249 ("dir/two.rs".to_string(), vec![6..9]),
4250 ("dir/three.rs".to_string(), vec![37..40])
4251 ])
4252 );
4253
4254 let buffer_4 = project
4255 .update(cx, |project, cx| {
4256 project.open_local_buffer("/dir/four.rs", cx)
4257 })
4258 .await
4259 .unwrap();
4260 buffer_4.update(cx, |buffer, cx| {
4261 let text = "two::TWO";
4262 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4263 });
4264
4265 assert_eq!(
4266 search(
4267 &project,
4268 SearchQuery::text(
4269 "TWO",
4270 false,
4271 true,
4272 false,
4273 Default::default(),
4274 Default::default(),
4275 None,
4276 )
4277 .unwrap(),
4278 cx
4279 )
4280 .await
4281 .unwrap(),
4282 HashMap::from_iter([
4283 ("dir/two.rs".to_string(), vec![6..9]),
4284 ("dir/three.rs".to_string(), vec![37..40]),
4285 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4286 ])
4287 );
4288}
4289
4290#[gpui::test]
4291async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4292 init_test(cx);
4293
4294 let search_query = "file";
4295
4296 let fs = FakeFs::new(cx.executor());
4297 fs.insert_tree(
4298 "/dir",
4299 json!({
4300 "one.rs": r#"// Rust file one"#,
4301 "one.ts": r#"// TypeScript file one"#,
4302 "two.rs": r#"// Rust file two"#,
4303 "two.ts": r#"// TypeScript file two"#,
4304 }),
4305 )
4306 .await;
4307 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4308
4309 assert!(
4310 search(
4311 &project,
4312 SearchQuery::text(
4313 search_query,
4314 false,
4315 true,
4316 false,
4317 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4318 Default::default(),
4319 None
4320 )
4321 .unwrap(),
4322 cx
4323 )
4324 .await
4325 .unwrap()
4326 .is_empty(),
4327 "If no inclusions match, no files should be returned"
4328 );
4329
4330 assert_eq!(
4331 search(
4332 &project,
4333 SearchQuery::text(
4334 search_query,
4335 false,
4336 true,
4337 false,
4338 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4339 Default::default(),
4340 None
4341 )
4342 .unwrap(),
4343 cx
4344 )
4345 .await
4346 .unwrap(),
4347 HashMap::from_iter([
4348 ("dir/one.rs".to_string(), vec![8..12]),
4349 ("dir/two.rs".to_string(), vec![8..12]),
4350 ]),
4351 "Rust only search should give only Rust files"
4352 );
4353
4354 assert_eq!(
4355 search(
4356 &project,
4357 SearchQuery::text(
4358 search_query,
4359 false,
4360 true,
4361 false,
4362
4363 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4364
4365 Default::default(),
4366 None,
4367 ).unwrap(),
4368 cx
4369 )
4370 .await
4371 .unwrap(),
4372 HashMap::from_iter([
4373 ("dir/one.ts".to_string(), vec![14..18]),
4374 ("dir/two.ts".to_string(), vec![14..18]),
4375 ]),
4376 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4377 );
4378
4379 assert_eq!(
4380 search(
4381 &project,
4382 SearchQuery::text(
4383 search_query,
4384 false,
4385 true,
4386 false,
4387
4388 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4389
4390 Default::default(),
4391 None,
4392 ).unwrap(),
4393 cx
4394 )
4395 .await
4396 .unwrap(),
4397 HashMap::from_iter([
4398 ("dir/two.ts".to_string(), vec![14..18]),
4399 ("dir/one.rs".to_string(), vec![8..12]),
4400 ("dir/one.ts".to_string(), vec![14..18]),
4401 ("dir/two.rs".to_string(), vec![8..12]),
4402 ]),
4403 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4404 );
4405}
4406
4407#[gpui::test]
4408async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4409 init_test(cx);
4410
4411 let search_query = "file";
4412
4413 let fs = FakeFs::new(cx.executor());
4414 fs.insert_tree(
4415 "/dir",
4416 json!({
4417 "one.rs": r#"// Rust file one"#,
4418 "one.ts": r#"// TypeScript file one"#,
4419 "two.rs": r#"// Rust file two"#,
4420 "two.ts": r#"// TypeScript file two"#,
4421 }),
4422 )
4423 .await;
4424 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4425
4426 assert_eq!(
4427 search(
4428 &project,
4429 SearchQuery::text(
4430 search_query,
4431 false,
4432 true,
4433 false,
4434 Default::default(),
4435 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4436 None,
4437 )
4438 .unwrap(),
4439 cx
4440 )
4441 .await
4442 .unwrap(),
4443 HashMap::from_iter([
4444 ("dir/one.rs".to_string(), vec![8..12]),
4445 ("dir/one.ts".to_string(), vec![14..18]),
4446 ("dir/two.rs".to_string(), vec![8..12]),
4447 ("dir/two.ts".to_string(), vec![14..18]),
4448 ]),
4449 "If no exclusions match, all files should be returned"
4450 );
4451
4452 assert_eq!(
4453 search(
4454 &project,
4455 SearchQuery::text(
4456 search_query,
4457 false,
4458 true,
4459 false,
4460 Default::default(),
4461 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4462 None,
4463 )
4464 .unwrap(),
4465 cx
4466 )
4467 .await
4468 .unwrap(),
4469 HashMap::from_iter([
4470 ("dir/one.ts".to_string(), vec![14..18]),
4471 ("dir/two.ts".to_string(), vec![14..18]),
4472 ]),
4473 "Rust exclusion search should give only TypeScript files"
4474 );
4475
4476 assert_eq!(
4477 search(
4478 &project,
4479 SearchQuery::text(
4480 search_query,
4481 false,
4482 true,
4483 false,
4484 Default::default(),
4485 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4486 None,
4487 ).unwrap(),
4488 cx
4489 )
4490 .await
4491 .unwrap(),
4492 HashMap::from_iter([
4493 ("dir/one.rs".to_string(), vec![8..12]),
4494 ("dir/two.rs".to_string(), vec![8..12]),
4495 ]),
4496 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4497 );
4498
4499 assert!(
4500 search(
4501 &project,
4502 SearchQuery::text(
4503 search_query,
4504 false,
4505 true,
4506 false,
4507 Default::default(),
4508
4509 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4510 None,
4511
4512 ).unwrap(),
4513 cx
4514 )
4515 .await
4516 .unwrap().is_empty(),
4517 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4518 );
4519}
4520
4521#[gpui::test]
4522async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4523 init_test(cx);
4524
4525 let search_query = "file";
4526
4527 let fs = FakeFs::new(cx.executor());
4528 fs.insert_tree(
4529 "/dir",
4530 json!({
4531 "one.rs": r#"// Rust file one"#,
4532 "one.ts": r#"// TypeScript file one"#,
4533 "two.rs": r#"// Rust file two"#,
4534 "two.ts": r#"// TypeScript file two"#,
4535 }),
4536 )
4537 .await;
4538 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4539
4540 assert!(
4541 search(
4542 &project,
4543 SearchQuery::text(
4544 search_query,
4545 false,
4546 true,
4547 false,
4548 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4549 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4550 None,
4551 )
4552 .unwrap(),
4553 cx
4554 )
4555 .await
4556 .unwrap()
4557 .is_empty(),
4558 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4559 );
4560
4561 assert!(
4562 search(
4563 &project,
4564 SearchQuery::text(
4565 search_query,
4566 false,
4567 true,
4568 false,
4569 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4570 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4571 None,
4572 ).unwrap(),
4573 cx
4574 )
4575 .await
4576 .unwrap()
4577 .is_empty(),
4578 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4579 );
4580
4581 assert!(
4582 search(
4583 &project,
4584 SearchQuery::text(
4585 search_query,
4586 false,
4587 true,
4588 false,
4589 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4590 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4591 None,
4592 )
4593 .unwrap(),
4594 cx
4595 )
4596 .await
4597 .unwrap()
4598 .is_empty(),
4599 "Non-matching inclusions and exclusions should not change that."
4600 );
4601
4602 assert_eq!(
4603 search(
4604 &project,
4605 SearchQuery::text(
4606 search_query,
4607 false,
4608 true,
4609 false,
4610 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4611 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4612 None,
4613 )
4614 .unwrap(),
4615 cx
4616 )
4617 .await
4618 .unwrap(),
4619 HashMap::from_iter([
4620 ("dir/one.ts".to_string(), vec![14..18]),
4621 ("dir/two.ts".to_string(), vec![14..18]),
4622 ]),
4623 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4624 );
4625}
4626
4627#[gpui::test]
4628async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
4629 init_test(cx);
4630
4631 let fs = FakeFs::new(cx.executor());
4632 fs.insert_tree(
4633 "/worktree-a",
4634 json!({
4635 "haystack.rs": r#"// NEEDLE"#,
4636 "haystack.ts": r#"// NEEDLE"#,
4637 }),
4638 )
4639 .await;
4640 fs.insert_tree(
4641 "/worktree-b",
4642 json!({
4643 "haystack.rs": r#"// NEEDLE"#,
4644 "haystack.ts": r#"// NEEDLE"#,
4645 }),
4646 )
4647 .await;
4648
4649 let project = Project::test(
4650 fs.clone(),
4651 ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
4652 cx,
4653 )
4654 .await;
4655
4656 assert_eq!(
4657 search(
4658 &project,
4659 SearchQuery::text(
4660 "NEEDLE",
4661 false,
4662 true,
4663 false,
4664 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
4665 Default::default(),
4666 None,
4667 )
4668 .unwrap(),
4669 cx
4670 )
4671 .await
4672 .unwrap(),
4673 HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
4674 "should only return results from included worktree"
4675 );
4676 assert_eq!(
4677 search(
4678 &project,
4679 SearchQuery::text(
4680 "NEEDLE",
4681 false,
4682 true,
4683 false,
4684 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
4685 Default::default(),
4686 None,
4687 )
4688 .unwrap(),
4689 cx
4690 )
4691 .await
4692 .unwrap(),
4693 HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
4694 "should only return results from included worktree"
4695 );
4696
4697 assert_eq!(
4698 search(
4699 &project,
4700 SearchQuery::text(
4701 "NEEDLE",
4702 false,
4703 true,
4704 false,
4705 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4706 Default::default(),
4707 None,
4708 )
4709 .unwrap(),
4710 cx
4711 )
4712 .await
4713 .unwrap(),
4714 HashMap::from_iter([
4715 ("worktree-a/haystack.ts".to_string(), vec![3..9]),
4716 ("worktree-b/haystack.ts".to_string(), vec![3..9])
4717 ]),
4718 "should return results from both worktrees"
4719 );
4720}
4721
4722#[gpui::test]
4723async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
4724 init_test(cx);
4725
4726 let fs = FakeFs::new(cx.background_executor.clone());
4727 fs.insert_tree(
4728 "/dir",
4729 json!({
4730 ".git": {},
4731 ".gitignore": "**/target\n/node_modules\n",
4732 "target": {
4733 "index.txt": "index_key:index_value"
4734 },
4735 "node_modules": {
4736 "eslint": {
4737 "index.ts": "const eslint_key = 'eslint value'",
4738 "package.json": r#"{ "some_key": "some value" }"#,
4739 },
4740 "prettier": {
4741 "index.ts": "const prettier_key = 'prettier value'",
4742 "package.json": r#"{ "other_key": "other value" }"#,
4743 },
4744 },
4745 "package.json": r#"{ "main_key": "main value" }"#,
4746 }),
4747 )
4748 .await;
4749 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4750
4751 let query = "key";
4752 assert_eq!(
4753 search(
4754 &project,
4755 SearchQuery::text(
4756 query,
4757 false,
4758 false,
4759 false,
4760 Default::default(),
4761 Default::default(),
4762 None,
4763 )
4764 .unwrap(),
4765 cx
4766 )
4767 .await
4768 .unwrap(),
4769 HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
4770 "Only one non-ignored file should have the query"
4771 );
4772
4773 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4774 assert_eq!(
4775 search(
4776 &project,
4777 SearchQuery::text(
4778 query,
4779 false,
4780 false,
4781 true,
4782 Default::default(),
4783 Default::default(),
4784 None,
4785 )
4786 .unwrap(),
4787 cx
4788 )
4789 .await
4790 .unwrap(),
4791 HashMap::from_iter([
4792 ("dir/package.json".to_string(), vec![8..11]),
4793 ("dir/target/index.txt".to_string(), vec![6..9]),
4794 (
4795 "dir/node_modules/prettier/package.json".to_string(),
4796 vec![9..12]
4797 ),
4798 (
4799 "dir/node_modules/prettier/index.ts".to_string(),
4800 vec![15..18]
4801 ),
4802 ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
4803 (
4804 "dir/node_modules/eslint/package.json".to_string(),
4805 vec![8..11]
4806 ),
4807 ]),
4808 "Unrestricted search with ignored directories should find every file with the query"
4809 );
4810
4811 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
4812 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
4813 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4814 assert_eq!(
4815 search(
4816 &project,
4817 SearchQuery::text(
4818 query,
4819 false,
4820 false,
4821 true,
4822 files_to_include,
4823 files_to_exclude,
4824 None,
4825 )
4826 .unwrap(),
4827 cx
4828 )
4829 .await
4830 .unwrap(),
4831 HashMap::from_iter([(
4832 "dir/node_modules/prettier/package.json".to_string(),
4833 vec![9..12]
4834 )]),
4835 "With search including ignored prettier directory and excluding TS files, only one file should be found"
4836 );
4837}
4838
4839#[gpui::test]
4840async fn test_create_entry(cx: &mut gpui::TestAppContext) {
4841 init_test(cx);
4842
4843 let fs = FakeFs::new(cx.executor().clone());
4844 fs.insert_tree(
4845 "/one/two",
4846 json!({
4847 "three": {
4848 "a.txt": "",
4849 "four": {}
4850 },
4851 "c.rs": ""
4852 }),
4853 )
4854 .await;
4855
4856 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
4857 project
4858 .update(cx, |project, cx| {
4859 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4860 project.create_entry((id, "b.."), true, cx)
4861 })
4862 .await
4863 .unwrap()
4864 .to_included()
4865 .unwrap();
4866
4867 // Can't create paths outside the project
4868 let result = project
4869 .update(cx, |project, cx| {
4870 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4871 project.create_entry((id, "../../boop"), true, cx)
4872 })
4873 .await;
4874 assert!(result.is_err());
4875
4876 // Can't create paths with '..'
4877 let result = project
4878 .update(cx, |project, cx| {
4879 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4880 project.create_entry((id, "four/../beep"), true, cx)
4881 })
4882 .await;
4883 assert!(result.is_err());
4884
4885 assert_eq!(
4886 fs.paths(true),
4887 vec![
4888 PathBuf::from("/"),
4889 PathBuf::from("/one"),
4890 PathBuf::from("/one/two"),
4891 PathBuf::from("/one/two/c.rs"),
4892 PathBuf::from("/one/two/three"),
4893 PathBuf::from("/one/two/three/a.txt"),
4894 PathBuf::from("/one/two/three/b.."),
4895 PathBuf::from("/one/two/three/four"),
4896 ]
4897 );
4898
4899 // And we cannot open buffers with '..'
4900 let result = project
4901 .update(cx, |project, cx| {
4902 let id = project.worktrees(cx).next().unwrap().read(cx).id();
4903 project.open_buffer((id, "../c.rs"), cx)
4904 })
4905 .await;
4906 assert!(result.is_err())
4907}
4908
// Verifies that a hover request fans out to every language server
// registered for the buffer's language that advertises hover support, and
// that a server without the capability is never queried.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Three fake servers advertise hover support; the fourth explicitly
    // advertises none, so it must never receive a hover request.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer triggers server startup for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect each started server and install a hover handler appropriate
    // to its role in the test. Handlers must be registered before the hover
    // request below is issued.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two reply with a non-empty hover and should both appear
            // in the merged result.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
                        let name = new_server_name.clone();
                        async move {
                            Ok(Some(lsp::Hover {
                                contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
                                    format!("{name} hover"),
                                )),
                                range: None,
                            }))
                        }
                    }),
                );
            }
            // This one is queried but returns no hover; it must not
            // contribute to the merged result.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This server lacks hover capabilities, so its handler must
            // never fire.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
                    |_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    },
                );
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every capable server has actually received a hover
    // request before inspecting the merged responses.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5059
5060#[gpui::test]
5061async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5062 init_test(cx);
5063
5064 let fs = FakeFs::new(cx.executor());
5065 fs.insert_tree(
5066 "/dir",
5067 json!({
5068 "a.ts": "a",
5069 }),
5070 )
5071 .await;
5072
5073 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5074
5075 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5076 language_registry.add(typescript_lang());
5077 let mut fake_language_servers = language_registry.register_fake_lsp(
5078 "TypeScript",
5079 FakeLspAdapter {
5080 capabilities: lsp::ServerCapabilities {
5081 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5082 ..lsp::ServerCapabilities::default()
5083 },
5084 ..FakeLspAdapter::default()
5085 },
5086 );
5087
5088 let (buffer, _handle) = project
5089 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
5090 .await
5091 .unwrap();
5092 cx.executor().run_until_parked();
5093
5094 let fake_server = fake_language_servers
5095 .next()
5096 .await
5097 .expect("failed to get the language server");
5098
5099 let mut request_handled =
5100 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5101 Ok(Some(lsp::Hover {
5102 contents: lsp::HoverContents::Array(vec![
5103 lsp::MarkedString::String("".to_string()),
5104 lsp::MarkedString::String(" ".to_string()),
5105 lsp::MarkedString::String("\n\n\n".to_string()),
5106 ]),
5107 range: None,
5108 }))
5109 });
5110
5111 let hover_task = project.update(cx, |project, cx| {
5112 project.hover(&buffer, Point::new(0, 0), cx)
5113 });
5114 let () = request_handled
5115 .next()
5116 .await
5117 .expect("All hover requests should have been triggered");
5118 assert_eq!(
5119 Vec::<String>::new(),
5120 hover_task
5121 .await
5122 .into_iter()
5123 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5124 .sorted()
5125 .collect::<Vec<_>>(),
5126 "Empty hover parts should be ignored"
5127 );
5128}
5129
5130#[gpui::test]
5131async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5132 init_test(cx);
5133
5134 let fs = FakeFs::new(cx.executor());
5135 fs.insert_tree(
5136 "/dir",
5137 json!({
5138 "a.ts": "a",
5139 }),
5140 )
5141 .await;
5142
5143 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5144
5145 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5146 language_registry.add(typescript_lang());
5147 let mut fake_language_servers = language_registry.register_fake_lsp(
5148 "TypeScript",
5149 FakeLspAdapter {
5150 capabilities: lsp::ServerCapabilities {
5151 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5152 ..lsp::ServerCapabilities::default()
5153 },
5154 ..FakeLspAdapter::default()
5155 },
5156 );
5157
5158 let (buffer, _handle) = project
5159 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
5160 .await
5161 .unwrap();
5162 cx.executor().run_until_parked();
5163
5164 let fake_server = fake_language_servers
5165 .next()
5166 .await
5167 .expect("failed to get the language server");
5168
5169 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5170 move |_, _| async move {
5171 Ok(Some(vec![
5172 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5173 title: "organize imports".to_string(),
5174 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5175 ..lsp::CodeAction::default()
5176 }),
5177 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5178 title: "fix code".to_string(),
5179 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5180 ..lsp::CodeAction::default()
5181 }),
5182 ]))
5183 },
5184 );
5185
5186 let code_actions_task = project.update(cx, |project, cx| {
5187 project.code_actions(
5188 &buffer,
5189 0..buffer.read(cx).len(),
5190 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5191 cx,
5192 )
5193 });
5194
5195 let () = request_handled
5196 .next()
5197 .await
5198 .expect("The code action request should have been triggered");
5199
5200 let code_actions = code_actions_task.await.unwrap();
5201 assert_eq!(code_actions.len(), 1);
5202 assert_eq!(
5203 code_actions[0].lsp_action.kind,
5204 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5205 );
5206}
5207
5208#[gpui::test]
5209async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5210 init_test(cx);
5211
5212 let fs = FakeFs::new(cx.executor());
5213 fs.insert_tree(
5214 "/dir",
5215 json!({
5216 "a.tsx": "a",
5217 }),
5218 )
5219 .await;
5220
5221 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5222
5223 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5224 language_registry.add(tsx_lang());
5225 let language_server_names = [
5226 "TypeScriptServer",
5227 "TailwindServer",
5228 "ESLintServer",
5229 "NoActionsCapabilitiesServer",
5230 ];
5231
5232 let mut language_server_rxs = [
5233 language_registry.register_fake_lsp(
5234 "tsx",
5235 FakeLspAdapter {
5236 name: language_server_names[0],
5237 capabilities: lsp::ServerCapabilities {
5238 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5239 ..lsp::ServerCapabilities::default()
5240 },
5241 ..FakeLspAdapter::default()
5242 },
5243 ),
5244 language_registry.register_fake_lsp(
5245 "tsx",
5246 FakeLspAdapter {
5247 name: language_server_names[1],
5248 capabilities: lsp::ServerCapabilities {
5249 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5250 ..lsp::ServerCapabilities::default()
5251 },
5252 ..FakeLspAdapter::default()
5253 },
5254 ),
5255 language_registry.register_fake_lsp(
5256 "tsx",
5257 FakeLspAdapter {
5258 name: language_server_names[2],
5259 capabilities: lsp::ServerCapabilities {
5260 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5261 ..lsp::ServerCapabilities::default()
5262 },
5263 ..FakeLspAdapter::default()
5264 },
5265 ),
5266 language_registry.register_fake_lsp(
5267 "tsx",
5268 FakeLspAdapter {
5269 name: language_server_names[3],
5270 capabilities: lsp::ServerCapabilities {
5271 code_action_provider: None,
5272 ..lsp::ServerCapabilities::default()
5273 },
5274 ..FakeLspAdapter::default()
5275 },
5276 ),
5277 ];
5278
5279 let (buffer, _handle) = project
5280 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
5281 .await
5282 .unwrap();
5283 cx.executor().run_until_parked();
5284
5285 let mut servers_with_actions_requests = HashMap::default();
5286 for i in 0..language_server_names.len() {
5287 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5288 panic!(
5289 "Failed to get language server #{i} with name {}",
5290 &language_server_names[i]
5291 )
5292 });
5293 let new_server_name = new_server.server.name();
5294
5295 assert!(
5296 !servers_with_actions_requests.contains_key(&new_server_name),
5297 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5298 );
5299 match new_server_name.0.as_ref() {
5300 "TailwindServer" | "TypeScriptServer" => {
5301 servers_with_actions_requests.insert(
5302 new_server_name.clone(),
5303 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5304 move |_, _| {
5305 let name = new_server_name.clone();
5306 async move {
5307 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5308 lsp::CodeAction {
5309 title: format!("{name} code action"),
5310 ..lsp::CodeAction::default()
5311 },
5312 )]))
5313 }
5314 },
5315 ),
5316 );
5317 }
5318 "ESLintServer" => {
5319 servers_with_actions_requests.insert(
5320 new_server_name,
5321 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5322 |_, _| async move { Ok(None) },
5323 ),
5324 );
5325 }
5326 "NoActionsCapabilitiesServer" => {
5327 let _never_handled = new_server
5328 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5329 panic!(
5330 "Should not call for code actions server with no corresponding capabilities"
5331 )
5332 });
5333 }
5334 unexpected => panic!("Unexpected server name: {unexpected}"),
5335 }
5336 }
5337
5338 let code_actions_task = project.update(cx, |project, cx| {
5339 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5340 });
5341
5342 // cx.run_until_parked();
5343 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5344 |mut code_actions_request| async move {
5345 code_actions_request
5346 .next()
5347 .await
5348 .expect("All code actions requests should have been triggered")
5349 },
5350 ))
5351 .await;
5352 assert_eq!(
5353 vec!["TailwindServer code action", "TypeScriptServer code action"],
5354 code_actions_task
5355 .await
5356 .unwrap()
5357 .into_iter()
5358 .map(|code_action| code_action.lsp_action.title)
5359 .sorted()
5360 .collect::<Vec<_>>(),
5361 "Should receive code actions responses from all related servers with hover capabilities"
5362 );
5363}
5364
5365#[gpui::test]
5366async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5367 init_test(cx);
5368
5369 let fs = FakeFs::new(cx.executor());
5370 fs.insert_tree(
5371 "/dir",
5372 json!({
5373 "a.rs": "let a = 1;",
5374 "b.rs": "let b = 2;",
5375 "c.rs": "let c = 2;",
5376 }),
5377 )
5378 .await;
5379
5380 let project = Project::test(
5381 fs,
5382 [
5383 "/dir/a.rs".as_ref(),
5384 "/dir/b.rs".as_ref(),
5385 "/dir/c.rs".as_ref(),
5386 ],
5387 cx,
5388 )
5389 .await;
5390
5391 // check the initial state and get the worktrees
5392 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5393 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5394 assert_eq!(worktrees.len(), 3);
5395
5396 let worktree_a = worktrees[0].read(cx);
5397 let worktree_b = worktrees[1].read(cx);
5398 let worktree_c = worktrees[2].read(cx);
5399
5400 // check they start in the right order
5401 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5402 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5403 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5404
5405 (
5406 worktrees[0].clone(),
5407 worktrees[1].clone(),
5408 worktrees[2].clone(),
5409 )
5410 });
5411
5412 // move first worktree to after the second
5413 // [a, b, c] -> [b, a, c]
5414 project
5415 .update(cx, |project, cx| {
5416 let first = worktree_a.read(cx);
5417 let second = worktree_b.read(cx);
5418 project.move_worktree(first.id(), second.id(), cx)
5419 })
5420 .expect("moving first after second");
5421
5422 // check the state after moving
5423 project.update(cx, |project, cx| {
5424 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5425 assert_eq!(worktrees.len(), 3);
5426
5427 let first = worktrees[0].read(cx);
5428 let second = worktrees[1].read(cx);
5429 let third = worktrees[2].read(cx);
5430
5431 // check they are now in the right order
5432 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5433 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5434 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5435 });
5436
5437 // move the second worktree to before the first
5438 // [b, a, c] -> [a, b, c]
5439 project
5440 .update(cx, |project, cx| {
5441 let second = worktree_a.read(cx);
5442 let first = worktree_b.read(cx);
5443 project.move_worktree(first.id(), second.id(), cx)
5444 })
5445 .expect("moving second before first");
5446
5447 // check the state after moving
5448 project.update(cx, |project, cx| {
5449 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5450 assert_eq!(worktrees.len(), 3);
5451
5452 let first = worktrees[0].read(cx);
5453 let second = worktrees[1].read(cx);
5454 let third = worktrees[2].read(cx);
5455
5456 // check they are now in the right order
5457 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5458 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5459 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5460 });
5461
5462 // move the second worktree to after the third
5463 // [a, b, c] -> [a, c, b]
5464 project
5465 .update(cx, |project, cx| {
5466 let second = worktree_b.read(cx);
5467 let third = worktree_c.read(cx);
5468 project.move_worktree(second.id(), third.id(), cx)
5469 })
5470 .expect("moving second after third");
5471
5472 // check the state after moving
5473 project.update(cx, |project, cx| {
5474 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5475 assert_eq!(worktrees.len(), 3);
5476
5477 let first = worktrees[0].read(cx);
5478 let second = worktrees[1].read(cx);
5479 let third = worktrees[2].read(cx);
5480
5481 // check they are now in the right order
5482 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5483 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5484 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5485 });
5486
5487 // move the third worktree to before the second
5488 // [a, c, b] -> [a, b, c]
5489 project
5490 .update(cx, |project, cx| {
5491 let third = worktree_c.read(cx);
5492 let second = worktree_b.read(cx);
5493 project.move_worktree(third.id(), second.id(), cx)
5494 })
5495 .expect("moving third before second");
5496
5497 // check the state after moving
5498 project.update(cx, |project, cx| {
5499 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5500 assert_eq!(worktrees.len(), 3);
5501
5502 let first = worktrees[0].read(cx);
5503 let second = worktrees[1].read(cx);
5504 let third = worktrees[2].read(cx);
5505
5506 // check they are now in the right order
5507 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5508 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5509 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5510 });
5511
5512 // move the first worktree to after the third
5513 // [a, b, c] -> [b, c, a]
5514 project
5515 .update(cx, |project, cx| {
5516 let first = worktree_a.read(cx);
5517 let third = worktree_c.read(cx);
5518 project.move_worktree(first.id(), third.id(), cx)
5519 })
5520 .expect("moving first after third");
5521
5522 // check the state after moving
5523 project.update(cx, |project, cx| {
5524 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5525 assert_eq!(worktrees.len(), 3);
5526
5527 let first = worktrees[0].read(cx);
5528 let second = worktrees[1].read(cx);
5529 let third = worktrees[2].read(cx);
5530
5531 // check they are now in the right order
5532 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5533 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5534 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5535 });
5536
5537 // move the third worktree to before the first
5538 // [b, c, a] -> [a, b, c]
5539 project
5540 .update(cx, |project, cx| {
5541 let third = worktree_a.read(cx);
5542 let first = worktree_b.read(cx);
5543 project.move_worktree(third.id(), first.id(), cx)
5544 })
5545 .expect("moving third before first");
5546
5547 // check the state after moving
5548 project.update(cx, |project, cx| {
5549 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5550 assert_eq!(worktrees.len(), 3);
5551
5552 let first = worktrees[0].read(cx);
5553 let second = worktrees[1].read(cx);
5554 let third = worktrees[2].read(cx);
5555
5556 // check they are now in the right order
5557 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5558 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5559 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5560 });
5561}
5562
// Verifies that the unstaged-changes diff for a buffer is computed against
// the git index contents, and is recomputed when the index changes.
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The staged (index) version of the file...
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // ...and the working-copy version, which adds a comment line and
    // changes the println argument.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index with the staged contents.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff state settle, then expect two hunks: the inserted
    // comment line and the modified println line.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Update the index so that only the println line differs from the
    // working copy.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    // The diff should be recomputed against the new index contents,
    // leaving a single insertion hunk.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[(2..3, "", "    println!(\"goodbye world\");\n")],
        );
    });
}
5654
5655async fn search(
5656 project: &Model<Project>,
5657 query: SearchQuery,
5658 cx: &mut gpui::TestAppContext,
5659) -> Result<HashMap<String, Vec<Range<usize>>>> {
5660 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5661 let mut results = HashMap::default();
5662 while let Ok(search_result) = search_rx.recv().await {
5663 match search_result {
5664 SearchResult::Buffer { buffer, ranges } => {
5665 results.entry(buffer).or_insert(ranges);
5666 }
5667 SearchResult::LimitReached => {}
5668 }
5669 }
5670 Ok(results
5671 .into_iter()
5672 .map(|(buffer, ranges)| {
5673 buffer.update(cx, |buffer, cx| {
5674 let path = buffer
5675 .file()
5676 .unwrap()
5677 .full_path(cx)
5678 .to_string_lossy()
5679 .to_string();
5680 let ranges = ranges
5681 .into_iter()
5682 .map(|range| range.to_offset(buffer))
5683 .collect::<Vec<_>>();
5684 (path, ranges)
5685 })
5686 })
5687 .collect())
5688}
5689
5690pub fn init_test(cx: &mut gpui::TestAppContext) {
5691 if std::env::var("RUST_LOG").is_ok() {
5692 env_logger::try_init().ok();
5693 }
5694
5695 cx.update(|cx| {
5696 let settings_store = SettingsStore::test(cx);
5697 cx.set_global(settings_store);
5698 release_channel::init(SemanticVersion::default(), cx);
5699 language::init(cx);
5700 Project::init_settings(cx);
5701 });
5702}
5703
5704fn json_lang() -> Arc<Language> {
5705 Arc::new(Language::new(
5706 LanguageConfig {
5707 name: "JSON".into(),
5708 matcher: LanguageMatcher {
5709 path_suffixes: vec!["json".to_string()],
5710 ..Default::default()
5711 },
5712 ..Default::default()
5713 },
5714 None,
5715 ))
5716}
5717
5718fn js_lang() -> Arc<Language> {
5719 Arc::new(Language::new(
5720 LanguageConfig {
5721 name: "JavaScript".into(),
5722 matcher: LanguageMatcher {
5723 path_suffixes: vec!["js".to_string()],
5724 ..Default::default()
5725 },
5726 ..Default::default()
5727 },
5728 None,
5729 ))
5730}
5731
5732fn rust_lang() -> Arc<Language> {
5733 Arc::new(Language::new(
5734 LanguageConfig {
5735 name: "Rust".into(),
5736 matcher: LanguageMatcher {
5737 path_suffixes: vec!["rs".to_string()],
5738 ..Default::default()
5739 },
5740 ..Default::default()
5741 },
5742 Some(tree_sitter_rust::LANGUAGE.into()),
5743 ))
5744}
5745
5746fn typescript_lang() -> Arc<Language> {
5747 Arc::new(Language::new(
5748 LanguageConfig {
5749 name: "TypeScript".into(),
5750 matcher: LanguageMatcher {
5751 path_suffixes: vec!["ts".to_string()],
5752 ..Default::default()
5753 },
5754 ..Default::default()
5755 },
5756 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5757 ))
5758}
5759
5760fn tsx_lang() -> Arc<Language> {
5761 Arc::new(Language::new(
5762 LanguageConfig {
5763 name: "tsx".into(),
5764 matcher: LanguageMatcher {
5765 path_suffixes: vec!["tsx".to_string()],
5766 ..Default::default()
5767 },
5768 ..Default::default()
5769 },
5770 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5771 ))
5772}
5773
5774fn get_all_tasks(
5775 project: &Model<Project>,
5776 worktree_id: Option<WorktreeId>,
5777 task_context: &TaskContext,
5778 cx: &mut AppContext,
5779) -> Vec<(TaskSourceKind, ResolvedTask)> {
5780 let (mut old, new) = project.update(cx, |project, cx| {
5781 project
5782 .task_store
5783 .read(cx)
5784 .task_inventory()
5785 .unwrap()
5786 .read(cx)
5787 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5788 });
5789 old.extend(new);
5790 old
5791}