1use crate::{Event, *};
2use fs::FakeFs;
3use futures::{future, StreamExt};
4use git::diff::assert_hunks;
5use gpui::{AppContext, SemanticVersion, UpdateGlobal};
6use http_client::Url;
7use language::{
8 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
9 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
10 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
11 OffsetRangeExt, Point, ToPoint,
12};
13use lsp::{
14 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
15 NumberOrString, TextDocumentEdit, WillRenameFiles,
16};
17use parking_lot::Mutex;
18use pretty_assertions::{assert_eq, assert_matches};
19use serde_json::json;
20#[cfg(not(windows))]
21use std::os;
22use std::{str::FromStr, sync::OnceLock};
23
24use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
25use task::{ResolvedTask, TaskContext};
26use unindent::Unindent as _;
27use util::{assert_set_eq, paths::PathMatcher, test::temp_tree, TryFutureExt as _};
28
29#[gpui::test]
30async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
31 cx.executor().allow_parking();
32
33 let (tx, mut rx) = futures::channel::mpsc::unbounded();
34 let _thread = std::thread::spawn(move || {
35 std::fs::metadata("/tmp").unwrap();
36 std::thread::sleep(Duration::from_millis(1000));
37 tx.unbounded_send(1).unwrap();
38 });
39 rx.next().await.unwrap();
40}
41
42#[gpui::test]
43async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
44 cx.executor().allow_parking();
45
46 let io_task = smol::unblock(move || {
47 println!("sleeping on thread {:?}", std::thread::current().id());
48 std::thread::sleep(Duration::from_millis(10));
49 1
50 });
51
52 let task = cx.foreground_executor().spawn(async move {
53 io_task.await;
54 });
55
56 task.await;
57}
58
59#[cfg(not(windows))]
60#[gpui::test]
61async fn test_symlinks(cx: &mut gpui::TestAppContext) {
62 init_test(cx);
63 cx.executor().allow_parking();
64
65 let dir = temp_tree(json!({
66 "root": {
67 "apple": "",
68 "banana": {
69 "carrot": {
70 "date": "",
71 "endive": "",
72 }
73 },
74 "fennel": {
75 "grape": "",
76 }
77 }
78 }));
79
80 let root_link_path = dir.path().join("root_link");
81 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
82 os::unix::fs::symlink(
83 dir.path().join("root/fennel"),
84 dir.path().join("root/finnochio"),
85 )
86 .unwrap();
87
88 let project = Project::test(Arc::new(RealFs::default()), [root_link_path.as_ref()], cx).await;
89
90 project.update(cx, |project, cx| {
91 let tree = project.worktrees(cx).next().unwrap().read(cx);
92 assert_eq!(tree.file_count(), 5);
93 assert_eq!(
94 tree.inode_for_path("fennel/grape"),
95 tree.inode_for_path("finnochio/grape")
96 );
97 });
98}
99
100#[gpui::test]
101async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
102 init_test(cx);
103
104 let dir = temp_tree(json!({
105 ".editorconfig": r#"
106 root = true
107 [*.rs]
108 indent_style = tab
109 indent_size = 3
110 end_of_line = lf
111 insert_final_newline = true
112 trim_trailing_whitespace = true
113 [*.js]
114 tab_width = 10
115 "#,
116 ".zed": {
117 "settings.json": r#"{
118 "tab_size": 8,
119 "hard_tabs": false,
120 "ensure_final_newline_on_save": false,
121 "remove_trailing_whitespace_on_save": false,
122 "soft_wrap": "editor_width"
123 }"#,
124 },
125 "a.rs": "fn a() {\n A\n}",
126 "b": {
127 ".editorconfig": r#"
128 [*.rs]
129 indent_size = 2
130 "#,
131 "b.rs": "fn b() {\n B\n}",
132 },
133 "c.js": "def c\n C\nend",
134 "README.json": "tabs are better\n",
135 }));
136
137 let path = dir.path();
138 let fs = FakeFs::new(cx.executor());
139 fs.insert_tree_from_real_fs(path, path).await;
140 let project = Project::test(fs, [path], cx).await;
141
142 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
143 language_registry.add(js_lang());
144 language_registry.add(json_lang());
145 language_registry.add(rust_lang());
146
147 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
148
149 cx.executor().run_until_parked();
150
151 cx.update(|cx| {
152 let tree = worktree.read(cx);
153 let settings_for = |path: &str| {
154 let file_entry = tree.entry_for_path(path).unwrap().clone();
155 let file = File::for_entry(file_entry, worktree.clone());
156 let file_language = project
157 .read(cx)
158 .languages()
159 .language_for_file_path(file.path.as_ref());
160 let file_language = cx
161 .background_executor()
162 .block(file_language)
163 .expect("Failed to get file language");
164 let file = file as _;
165 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
166 };
167
168 let settings_a = settings_for("a.rs");
169 let settings_b = settings_for("b/b.rs");
170 let settings_c = settings_for("c.js");
171 let settings_readme = settings_for("README.json");
172
173 // .editorconfig overrides .zed/settings
174 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
175 assert_eq!(settings_a.hard_tabs, true);
176 assert_eq!(settings_a.ensure_final_newline_on_save, true);
177 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
178
179 // .editorconfig in b/ overrides .editorconfig in root
180 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
181
182 // "indent_size" is not set, so "tab_width" is used
183 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
184
185 // README.md should not be affected by .editorconfig's globe "*.rs"
186 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
187 });
188}
189
// Verifies that nested `.zed` directories contribute directory-scoped settings
// and tasks: settings from `b/.zed` override the root `.zed` for files under
// `b/`, task lists from both directories are merged, recently-scheduled tasks
// are promoted to the front of the list, and file-based (global) tasks are
// appended after worktree tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let task_context = TaskContext::default();

    // Wait for the worktree scan and settings loading to settle.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });
    // The source kind identifying tasks that came from the root `.zed` dir.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-file: `a/a.rs` sees the root settings,
            // `b/b.rs` sees the override from `b/.zed`.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, Some(worktree_id), &task_context, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files contribute; neither has been scheduled yet.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Schedule the root `.zed` task and register a global file-based task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    None,
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, Some(worktree_id), &task_context, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global file-based task
    // (with its env) appears after the worktree tasks.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from("b/.zed"),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into(),
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
383
// Exercises the full language-server lifecycle for a project: servers start
// lazily when a matching buffer opens; open/change/save/close notifications
// are routed only to servers whose language matches the buffer (saves go to
// all servers); renames that change a file's extension move the buffer
// between servers, reset its document version, and clear stale diagnostics;
// and restarting servers shuts the old ones down and reopens all documents
// on the replacements.
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register fake Rust and JSON servers so the test can observe the exact
    // LSP traffic each one receives. Both advertise completion triggers and
    // save-notification support.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/Cargo.toml", cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, so it gets no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/package.json", cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/test2.rs", cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/Cargo.toml").unwrap())
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new("/the-root/test2.rs"),
        Path::new("/the-root/test3.rs"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test2.rs").unwrap(),),
    );
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic on the renamed buffer so we can verify below that it
    // is cleared when the buffer's language changes.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new("/the-root/test3.rs"),
        Path::new("/the-root/test3.json"),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path("/the-root/test3.rs").unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .handle_request::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path("/the-root/test.rs").unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/package.json").unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path("/the-root/test3.json").unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path("/the-root/package.json").unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
781
// Verifies `workspace/didChangeWatchedFiles` support: gitignored directories
// are not scanned until a language server registers a watcher that matches
// them, and subsequent filesystem mutations are forwarded to the server only
// when they match one of its registered glob patterns.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-root",
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-root".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/the-root/src/a.rs", cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    // Baseline for counting the directory scans triggered by watcher registration.
    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers; the third targets a path inside the gitignored
    // `target` directory, which forces it to be loaded.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/Cargo.toml".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/src/*.{rs,c}".to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    "/the-root/target/y/**/*.rs".to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record every DidChangeWatchedFiles notification, sorted by URI for
    // deterministic assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registration alone produces no change events, only directory scans
    // (4 extra read_dir calls — presumably target/, x/, y/, y/out/; TODO confirm).
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file("/the-root/src/c.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/src/d.txt".as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file("/the-root/src/b.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/x/out/x2.rs".as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file("/the-root/target/y/out/y2.rs".as_ref(), Default::default())
        .await
        .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/b.rs").unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/src/c.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path("/the-root/target/y/out/y2.rs").unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
975
// Verifies that when two single-file worktrees are open, diagnostics published
// for each file land on the correct buffer and are rendered at the right
// ranges with the right severities.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Each file is opened as its own single-file worktree.
    let project = Project::test(fs, ["/dir/a.rs".as_ref(), "/dir/b.rs".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/b.rs", cx))
        .await
        .unwrap();

    // Publish one diagnostic per file from the same (fake) server: an ERROR on
    // `a` in a.rs and a WARNING on `b` in b.rs, both covering columns 4..5.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/a.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path("/dir/b.rs").unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, split into styled chunks.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1068
1069#[gpui::test]
1070async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1071 init_test(cx);
1072
1073 let fs = FakeFs::new(cx.executor());
1074 fs.insert_tree(
1075 "/root",
1076 json!({
1077 "dir": {
1078 ".git": {
1079 "HEAD": "ref: refs/heads/main",
1080 },
1081 ".gitignore": "b.rs",
1082 "a.rs": "let a = 1;",
1083 "b.rs": "let b = 2;",
1084 },
1085 "other.rs": "let b = c;"
1086 }),
1087 )
1088 .await;
1089
1090 let project = Project::test(fs, ["/root/dir".as_ref()], cx).await;
1091 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1092 let (worktree, _) = project
1093 .update(cx, |project, cx| {
1094 project.find_or_create_worktree("/root/dir", true, cx)
1095 })
1096 .await
1097 .unwrap();
1098 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1099
1100 let (worktree, _) = project
1101 .update(cx, |project, cx| {
1102 project.find_or_create_worktree("/root/other.rs", false, cx)
1103 })
1104 .await
1105 .unwrap();
1106 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1107
1108 let server_id = LanguageServerId(0);
1109 lsp_store.update(cx, |lsp_store, cx| {
1110 lsp_store
1111 .update_diagnostics(
1112 server_id,
1113 lsp::PublishDiagnosticsParams {
1114 uri: Url::from_file_path("/root/dir/b.rs").unwrap(),
1115 version: None,
1116 diagnostics: vec![lsp::Diagnostic {
1117 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1118 severity: Some(lsp::DiagnosticSeverity::ERROR),
1119 message: "unused variable 'b'".to_string(),
1120 ..Default::default()
1121 }],
1122 },
1123 &[],
1124 cx,
1125 )
1126 .unwrap();
1127 lsp_store
1128 .update_diagnostics(
1129 server_id,
1130 lsp::PublishDiagnosticsParams {
1131 uri: Url::from_file_path("/root/other.rs").unwrap(),
1132 version: None,
1133 diagnostics: vec![lsp::Diagnostic {
1134 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1135 severity: Some(lsp::DiagnosticSeverity::ERROR),
1136 message: "unknown variable 'c'".to_string(),
1137 ..Default::default()
1138 }],
1139 },
1140 &[],
1141 cx,
1142 )
1143 .unwrap();
1144 });
1145
1146 let main_ignored_buffer = project
1147 .update(cx, |project, cx| {
1148 project.open_buffer((main_worktree_id, "b.rs"), cx)
1149 })
1150 .await
1151 .unwrap();
1152 main_ignored_buffer.update(cx, |buffer, _| {
1153 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1154 assert_eq!(
1155 chunks
1156 .iter()
1157 .map(|(s, d)| (s.as_str(), *d))
1158 .collect::<Vec<_>>(),
1159 &[
1160 ("let ", None),
1161 ("b", Some(DiagnosticSeverity::ERROR)),
1162 (" = 2;", None),
1163 ],
1164 "Gigitnored buffers should still get in-buffer diagnostics",
1165 );
1166 });
1167 let other_buffer = project
1168 .update(cx, |project, cx| {
1169 project.open_buffer((other_worktree_id, ""), cx)
1170 })
1171 .await
1172 .unwrap();
1173 other_buffer.update(cx, |buffer, _| {
1174 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1175 assert_eq!(
1176 chunks
1177 .iter()
1178 .map(|(s, d)| (s.as_str(), *d))
1179 .collect::<Vec<_>>(),
1180 &[
1181 ("let b = ", None),
1182 ("c", Some(DiagnosticSeverity::ERROR)),
1183 (";", None),
1184 ],
1185 "Buffers from hidden projects should still get in-buffer diagnostics"
1186 );
1187 });
1188
1189 project.update(cx, |project, cx| {
1190 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1191 assert_eq!(
1192 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1193 vec![(
1194 ProjectPath {
1195 worktree_id: main_worktree_id,
1196 path: Arc::from(Path::new("b.rs")),
1197 },
1198 server_id,
1199 DiagnosticSummary {
1200 error_count: 1,
1201 warning_count: 0,
1202 }
1203 )]
1204 );
1205 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1206 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1207 });
1208}
1209
// End-to-end check of the disk-based diagnostics event lifecycle. The fake
// server opens a work-done progress using the adapter's
// `disk_based_diagnostics_progress_token`, publishes one diagnostic, then ends
// the progress. The project must emit, in order: LanguageServerAdded,
// RefreshInlayHints, DiskBasedDiagnosticsStarted, DiagnosticsUpdated, and
// DiskBasedDiagnosticsFinished. Finally, publishing empty diagnostics twice in
// a row must produce only a single DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before the server emits anything we assert on.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token marks disk-based diagnostics
    // as "started" for this server.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // Ending the progress on the same token finishes the disk-based cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the buffer snapshot at the
    // position the server reported.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second identical (empty) publish must not generate another event:
    // after draining all pending work, the event stream is still pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1345
1346#[gpui::test]
1347async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
1348 init_test(cx);
1349
1350 let progress_token = "the-progress-token";
1351
1352 let fs = FakeFs::new(cx.executor());
1353 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1354
1355 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1356
1357 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1358 language_registry.add(rust_lang());
1359 let mut fake_servers = language_registry.register_fake_lsp(
1360 "Rust",
1361 FakeLspAdapter {
1362 name: "the-language-server",
1363 disk_based_diagnostics_sources: vec!["disk".into()],
1364 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1365 ..Default::default()
1366 },
1367 );
1368
1369 let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());
1370
1371 let (buffer, _handle) = project
1372 .update(cx, |project, cx| {
1373 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1374 })
1375 .await
1376 .unwrap();
1377
1378 // Simulate diagnostics starting to update.
1379 let fake_server = fake_servers.next().await.unwrap();
1380 fake_server.start_progress(progress_token).await;
1381
1382 // Restart the server before the diagnostics finish updating.
1383 project.update(cx, |project, cx| {
1384 project.restart_language_servers_for_buffers([buffer], cx);
1385 });
1386 let mut events = cx.events(&project);
1387
1388 // Simulate the newly started server sending more diagnostics.
1389 let fake_server = fake_servers.next().await.unwrap();
1390 assert_eq!(
1391 events.next().await.unwrap(),
1392 Event::LanguageServerAdded(
1393 LanguageServerId(1),
1394 fake_server.server.name(),
1395 Some(worktree_id)
1396 )
1397 );
1398 assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
1399 fake_server.start_progress(progress_token).await;
1400 assert_eq!(
1401 events.next().await.unwrap(),
1402 Event::DiskBasedDiagnosticsStarted {
1403 language_server_id: LanguageServerId(1)
1404 }
1405 );
1406 project.update(cx, |project, cx| {
1407 assert_eq!(
1408 project
1409 .language_servers_running_disk_based_diagnostics(cx)
1410 .collect::<Vec<_>>(),
1411 [LanguageServerId(1)]
1412 );
1413 });
1414
1415 // All diagnostics are considered done, despite the old server's diagnostic
1416 // task never completing.
1417 fake_server.end_progress(progress_token);
1418 assert_eq!(
1419 events.next().await.unwrap(),
1420 Event::DiskBasedDiagnosticsFinished {
1421 language_server_id: LanguageServerId(1)
1422 }
1423 );
1424 project.update(cx, |project, cx| {
1425 assert_eq!(
1426 project
1427 .language_servers_running_disk_based_diagnostics(cx)
1428 .collect::<Vec<_>>(),
1429 [] as [language::LanguageServerId; 0]
1430 );
1431 });
1432}
1433
// Restarting a language server must clear the diagnostics it previously
// published: both the per-buffer diagnostic entries and the project-wide
// diagnostic summary drop back to zero after the restart.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path("/dir/a.rs").unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Drain pending work so the publish is processed, then confirm the
    // diagnostic landed in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers([buffer.clone()], cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
1514
1515#[gpui::test]
1516async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1517 init_test(cx);
1518
1519 let fs = FakeFs::new(cx.executor());
1520 fs.insert_tree("/dir", json!({ "a.rs": "" })).await;
1521
1522 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1523 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1524
1525 language_registry.add(rust_lang());
1526 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1527
1528 let (buffer, _handle) = project
1529 .update(cx, |project, cx| {
1530 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1531 })
1532 .await
1533 .unwrap();
1534
1535 // Before restarting the server, report diagnostics with an unknown buffer version.
1536 let fake_server = fake_servers.next().await.unwrap();
1537 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1538 uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
1539 version: Some(10000),
1540 diagnostics: Vec::new(),
1541 });
1542 cx.executor().run_until_parked();
1543
1544 project.update(cx, |project, cx| {
1545 project.restart_language_servers_for_buffers([buffer.clone()], cx);
1546 });
1547 let mut fake_server = fake_servers.next().await.unwrap();
1548 let notification = fake_server
1549 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1550 .await
1551 .text_document;
1552 assert_eq!(notification.version, 0);
1553}
1554
// Cancelling language-server work for a buffer must send a
// WorkDoneProgressCancel notification only for progress the server marked as
// cancellable. Two progresses are started here — "another-token"
// (cancellable: false) and the disk-based token (cancellable: true) — and the
// single cancel notification received names the cancellable one.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "" })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Non-cancellable progress: must NOT receive a cancel request below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Cancellable progress: this is the one the cancel request should target.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
1619
1620#[gpui::test]
1621async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
1622 init_test(cx);
1623
1624 let fs = FakeFs::new(cx.executor());
1625 fs.insert_tree("/dir", json!({ "a.rs": "", "b.js": "" }))
1626 .await;
1627
1628 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
1629 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1630
1631 let mut fake_rust_servers = language_registry.register_fake_lsp(
1632 "Rust",
1633 FakeLspAdapter {
1634 name: "rust-lsp",
1635 ..Default::default()
1636 },
1637 );
1638 let mut fake_js_servers = language_registry.register_fake_lsp(
1639 "JavaScript",
1640 FakeLspAdapter {
1641 name: "js-lsp",
1642 ..Default::default()
1643 },
1644 );
1645 language_registry.add(rust_lang());
1646 language_registry.add(js_lang());
1647
1648 let _rs_buffer = project
1649 .update(cx, |project, cx| {
1650 project.open_local_buffer_with_lsp("/dir/a.rs", cx)
1651 })
1652 .await
1653 .unwrap();
1654 let _js_buffer = project
1655 .update(cx, |project, cx| {
1656 project.open_local_buffer_with_lsp("/dir/b.js", cx)
1657 })
1658 .await
1659 .unwrap();
1660
1661 let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
1662 assert_eq!(
1663 fake_rust_server_1
1664 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1665 .await
1666 .text_document
1667 .uri
1668 .as_str(),
1669 "file:///dir/a.rs"
1670 );
1671
1672 let mut fake_js_server = fake_js_servers.next().await.unwrap();
1673 assert_eq!(
1674 fake_js_server
1675 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1676 .await
1677 .text_document
1678 .uri
1679 .as_str(),
1680 "file:///dir/b.js"
1681 );
1682
1683 // Disable Rust language server, ensuring only that server gets stopped.
1684 cx.update(|cx| {
1685 SettingsStore::update_global(cx, |settings, cx| {
1686 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1687 settings.languages.insert(
1688 "Rust".into(),
1689 LanguageSettingsContent {
1690 enable_language_server: Some(false),
1691 ..Default::default()
1692 },
1693 );
1694 });
1695 })
1696 });
1697 fake_rust_server_1
1698 .receive_notification::<lsp::notification::Exit>()
1699 .await;
1700
1701 // Enable Rust and disable JavaScript language servers, ensuring that the
1702 // former gets started again and that the latter stops.
1703 cx.update(|cx| {
1704 SettingsStore::update_global(cx, |settings, cx| {
1705 settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
1706 settings.languages.insert(
1707 LanguageName::new("Rust"),
1708 LanguageSettingsContent {
1709 enable_language_server: Some(true),
1710 ..Default::default()
1711 },
1712 );
1713 settings.languages.insert(
1714 LanguageName::new("JavaScript"),
1715 LanguageSettingsContent {
1716 enable_language_server: Some(false),
1717 ..Default::default()
1718 },
1719 );
1720 });
1721 })
1722 });
1723 let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
1724 assert_eq!(
1725 fake_rust_server_2
1726 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1727 .await
1728 .text_document
1729 .uri
1730 .as_str(),
1731 "file:///dir/a.rs"
1732 );
1733 fake_js_server
1734 .receive_notification::<lsp::notification::Exit>()
1735 .await;
1736}
1737
// Diagnostics published with an older buffer version must be translated
// through every edit made since that version, so they land on the text they
// originally described. Exercises: (1) plain translation after a leading
// insertion, (2) overlapping error/warning ranges, and (3) a publish tagged
// with the latest version arriving after further unsaved edits.
// Note: group ids in the assertions keep increasing across successive
// publishes (1,2 -> 3,4 -> 5,6), reflecting fresh diagnostic groups per update.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    let _handle = lsp_store.update(cx, |lsp_store, cx| {
        lsp_store.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path("/dir/a.rs").unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2022
2023#[gpui::test]
2024async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2025 init_test(cx);
2026
2027 let text = concat!(
2028 "let one = ;\n", //
2029 "let two = \n",
2030 "let three = 3;\n",
2031 );
2032
2033 let fs = FakeFs::new(cx.executor());
2034 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2035
2036 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2037 let buffer = project
2038 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2039 .await
2040 .unwrap();
2041
2042 project.update(cx, |project, cx| {
2043 project.lsp_store.update(cx, |lsp_store, cx| {
2044 lsp_store
2045 .update_diagnostic_entries(
2046 LanguageServerId(0),
2047 PathBuf::from("/dir/a.rs"),
2048 None,
2049 vec![
2050 DiagnosticEntry {
2051 range: Unclipped(PointUtf16::new(0, 10))
2052 ..Unclipped(PointUtf16::new(0, 10)),
2053 diagnostic: Diagnostic {
2054 severity: DiagnosticSeverity::ERROR,
2055 message: "syntax error 1".to_string(),
2056 ..Default::default()
2057 },
2058 },
2059 DiagnosticEntry {
2060 range: Unclipped(PointUtf16::new(1, 10))
2061 ..Unclipped(PointUtf16::new(1, 10)),
2062 diagnostic: Diagnostic {
2063 severity: DiagnosticSeverity::ERROR,
2064 message: "syntax error 2".to_string(),
2065 ..Default::default()
2066 },
2067 },
2068 ],
2069 cx,
2070 )
2071 .unwrap();
2072 })
2073 });
2074
2075 // An empty range is extended forward to include the following character.
2076 // At the end of a line, an empty range is extended backward to include
2077 // the preceding character.
2078 buffer.update(cx, |buffer, _| {
2079 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2080 assert_eq!(
2081 chunks
2082 .iter()
2083 .map(|(s, d)| (s.as_str(), *d))
2084 .collect::<Vec<_>>(),
2085 &[
2086 ("let one = ", None),
2087 (";", Some(DiagnosticSeverity::ERROR)),
2088 ("\nlet two =", None),
2089 (" ", Some(DiagnosticSeverity::ERROR)),
2090 ("\nlet three = 3;\n", None)
2091 ]
2092 );
2093 });
2094}
2095
2096#[gpui::test]
2097async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2098 init_test(cx);
2099
2100 let fs = FakeFs::new(cx.executor());
2101 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2102 .await;
2103
2104 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2105 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2106
2107 lsp_store.update(cx, |lsp_store, cx| {
2108 lsp_store
2109 .update_diagnostic_entries(
2110 LanguageServerId(0),
2111 Path::new("/dir/a.rs").to_owned(),
2112 None,
2113 vec![DiagnosticEntry {
2114 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2115 diagnostic: Diagnostic {
2116 severity: DiagnosticSeverity::ERROR,
2117 is_primary: true,
2118 message: "syntax error a1".to_string(),
2119 ..Default::default()
2120 },
2121 }],
2122 cx,
2123 )
2124 .unwrap();
2125 lsp_store
2126 .update_diagnostic_entries(
2127 LanguageServerId(1),
2128 Path::new("/dir/a.rs").to_owned(),
2129 None,
2130 vec![DiagnosticEntry {
2131 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2132 diagnostic: Diagnostic {
2133 severity: DiagnosticSeverity::ERROR,
2134 is_primary: true,
2135 message: "syntax error b1".to_string(),
2136 ..Default::default()
2137 },
2138 }],
2139 cx,
2140 )
2141 .unwrap();
2142
2143 assert_eq!(
2144 lsp_store.diagnostic_summary(false, cx),
2145 DiagnosticSummary {
2146 error_count: 2,
2147 warning_count: 0,
2148 }
2149 );
2150 });
2151}
2152
// Verifies that edits computed by a language server against an older
// document version are correctly transformed onto the buffer's current
// contents after intervening local edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/a.rs", cx)
        })
        .await
        .unwrap();

    // Record the document version the server saw on open; the LSP edits
    // below will be expressed against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate the LSP edits into buffer coordinates, passing the old
    // document version so they are interpreted against the pre-edit text.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the concurrent local edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2307
// Verifies that a whole-file "diff-style" set of LSP edits (as rust-analyzer
// produces for its merge-imports code action) is minimized down to the
// smallest equivalent set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The whole-file diff should have been reduced to two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2416
// Verifies that edits_from_lsp tolerates LSP edits that arrive unordered,
// with inverted ranges, or with ranges pointing past the end of the file,
// normalizing them into a minimal, valid set of buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Out-of-bounds end position (line 99 does not exist).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is two well-formed edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2521
2522fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2523 buffer: &Buffer,
2524 range: Range<T>,
2525) -> Vec<(String, Option<DiagnosticSeverity>)> {
2526 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2527 for chunk in buffer.snapshot().chunks(range, true) {
2528 if chunks.last().map_or(false, |prev_chunk| {
2529 prev_chunk.1 == chunk.diagnostic_severity
2530 }) {
2531 chunks.last_mut().unwrap().0.push_str(chunk.text);
2532 } else {
2533 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2534 }
2535 }
2536 chunks
2537}
2538
// Verifies go-to-definition into a file outside the open worktree: the
// target is loaded into a new invisible worktree, which is released once
// the definition (and with it the target buffer) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside its worktree.
    let project = Project::test(fs, ["/dir/b.rs".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/b.rs", cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.handle_request::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new("/dir/b.rs"),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path("/dir/a.rs").unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });

    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new("/dir/a.rs"),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The target file was added as a new, non-visible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [("/dir/a.rs".as_ref(), false), ("/dir/b.rs".as_ref(), true)],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(list_worktrees(&project, cx), [("/dir/b.rs".as_ref(), true)]);
    });

    /// Lists each worktree's absolute path and whether it is visible.
    fn list_worktrees<'a>(
        project: &'a Model<Project>,
        cx: &'a AppContext,
    ) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2634
// Verifies that completion items lacking an explicit textEdit get a replace
// range inferred from the text surrounding the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Completing after a word: the inferred range must cover the `fqn` query.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Completing inside a string literal: the inferred range covers the
    // trailing `cmp` segment before the closing quote.
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
2726
// Verifies that carriage returns (`\r` and `\r\n`) in a completion's
// insert_text are normalized to plain `\n` before being applied.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server responds with insert_text containing a bare `\r` and a
    // `\r\n`; both must come out as `\n` in the resulting completion.
    fake_server
        .handle_request::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
2787
// Verifies applying a code action that carries a command instead of edits:
// resolving the action yields no edits, so the command is executed, and the
// edits the server pushes back via `workspace/applyEdit` end up in the
// returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.handle_request::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .handle_request::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path("/dir/a.ts").unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
2922
2923#[gpui::test(iterations = 10)]
2924async fn test_save_file(cx: &mut gpui::TestAppContext) {
2925 init_test(cx);
2926
2927 let fs = FakeFs::new(cx.executor());
2928 fs.insert_tree(
2929 "/dir",
2930 json!({
2931 "file1": "the old contents",
2932 }),
2933 )
2934 .await;
2935
2936 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2937 let buffer = project
2938 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2939 .await
2940 .unwrap();
2941 buffer.update(cx, |buffer, cx| {
2942 assert_eq!(buffer.text(), "the old contents");
2943 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
2944 });
2945
2946 project
2947 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
2948 .await
2949 .unwrap();
2950
2951 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
2952 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
2953}
2954
2955#[gpui::test(iterations = 30)]
2956async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
2957 init_test(cx);
2958
2959 let fs = FakeFs::new(cx.executor().clone());
2960 fs.insert_tree(
2961 "/dir",
2962 json!({
2963 "file1": "the original contents",
2964 }),
2965 )
2966 .await;
2967
2968 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
2969 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
2970 let buffer = project
2971 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
2972 .await
2973 .unwrap();
2974
2975 // Simulate buffer diffs being slow, so that they don't complete before
2976 // the next file change occurs.
2977 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
2978
2979 // Change the buffer's file on disk, and then wait for the file change
2980 // to be detected by the worktree, so that the buffer starts reloading.
2981 fs.save(
2982 "/dir/file1".as_ref(),
2983 &"the first contents".into(),
2984 Default::default(),
2985 )
2986 .await
2987 .unwrap();
2988 worktree.next_event(cx).await;
2989
2990 // Change the buffer's file again. Depending on the random seed, the
2991 // previous file change may still be in progress.
2992 fs.save(
2993 "/dir/file1".as_ref(),
2994 &"the second contents".into(),
2995 Default::default(),
2996 )
2997 .await
2998 .unwrap();
2999 worktree.next_event(cx).await;
3000
3001 cx.executor().run_until_parked();
3002 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3003 buffer.read_with(cx, |buffer, _| {
3004 assert_eq!(buffer.text(), on_disk_text);
3005 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3006 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3007 });
3008}
3009
3010#[gpui::test(iterations = 30)]
3011async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3012 init_test(cx);
3013
3014 let fs = FakeFs::new(cx.executor().clone());
3015 fs.insert_tree(
3016 "/dir",
3017 json!({
3018 "file1": "the original contents",
3019 }),
3020 )
3021 .await;
3022
3023 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3024 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3025 let buffer = project
3026 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3027 .await
3028 .unwrap();
3029
3030 // Simulate buffer diffs being slow, so that they don't complete before
3031 // the next file change occurs.
3032 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3033
3034 // Change the buffer's file on disk, and then wait for the file change
3035 // to be detected by the worktree, so that the buffer starts reloading.
3036 fs.save(
3037 "/dir/file1".as_ref(),
3038 &"the first contents".into(),
3039 Default::default(),
3040 )
3041 .await
3042 .unwrap();
3043 worktree.next_event(cx).await;
3044
3045 cx.executor()
3046 .spawn(cx.executor().simulate_random_delay())
3047 .await;
3048
3049 // Perform a noop edit, causing the buffer's version to increase.
3050 buffer.update(cx, |buffer, cx| {
3051 buffer.edit([(0..0, " ")], None, cx);
3052 buffer.undo(cx);
3053 });
3054
3055 cx.executor().run_until_parked();
3056 let on_disk_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3057 buffer.read_with(cx, |buffer, _| {
3058 let buffer_text = buffer.text();
3059 if buffer_text == on_disk_text {
3060 assert!(
3061 !buffer.is_dirty() && !buffer.has_conflict(),
3062 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3063 );
3064 }
3065 // If the file change occurred while the buffer was processing the first
3066 // change, the buffer will be in a conflicting state.
3067 else {
3068 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3069 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3070 }
3071 });
3072}
3073
3074#[gpui::test]
3075async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3076 init_test(cx);
3077
3078 let fs = FakeFs::new(cx.executor());
3079 fs.insert_tree(
3080 "/dir",
3081 json!({
3082 "file1": "the old contents",
3083 }),
3084 )
3085 .await;
3086
3087 let project = Project::test(fs.clone(), ["/dir/file1".as_ref()], cx).await;
3088 let buffer = project
3089 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3090 .await
3091 .unwrap();
3092 buffer.update(cx, |buffer, cx| {
3093 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3094 });
3095
3096 project
3097 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3098 .await
3099 .unwrap();
3100
3101 let new_text = fs.load(Path::new("/dir/file1")).await.unwrap();
3102 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3103}
3104
3105#[gpui::test]
3106async fn test_save_as(cx: &mut gpui::TestAppContext) {
3107 init_test(cx);
3108
3109 let fs = FakeFs::new(cx.executor());
3110 fs.insert_tree("/dir", json!({})).await;
3111
3112 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3113
3114 let languages = project.update(cx, |project, _| project.languages().clone());
3115 languages.add(rust_lang());
3116
3117 let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
3118 buffer.update(cx, |buffer, cx| {
3119 buffer.edit([(0..0, "abc")], None, cx);
3120 assert!(buffer.is_dirty());
3121 assert!(!buffer.has_conflict());
3122 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
3123 });
3124 project
3125 .update(cx, |project, cx| {
3126 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
3127 let path = ProjectPath {
3128 worktree_id,
3129 path: Arc::from(Path::new("file1.rs")),
3130 };
3131 project.save_buffer_as(buffer.clone(), path, cx)
3132 })
3133 .await
3134 .unwrap();
3135 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
3136
3137 cx.executor().run_until_parked();
3138 buffer.update(cx, |buffer, cx| {
3139 assert_eq!(
3140 buffer.file().unwrap().full_path(cx),
3141 Path::new("dir/file1.rs")
3142 );
3143 assert!(!buffer.is_dirty());
3144 assert!(!buffer.has_conflict());
3145 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
3146 });
3147
3148 let opened_buffer = project
3149 .update(cx, |project, cx| {
3150 project.open_local_buffer("/dir/file1.rs", cx)
3151 })
3152 .await
3153 .unwrap();
3154 assert_eq!(opened_buffer, buffer);
3155}
3156
// Verifies (against the real filesystem) that renames and deletions picked
// up by a rescan preserve worktree entry ids and keep open buffers attached
// to their files, and that the same changes replay correctly onto a remote
// replica of the worktree.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = temp_tree(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::default()), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture the worktree's update stream so it can be replayed into the
    // remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    let expected_paths = vec![
        "a",
        "a/file1",
        "a/file2.new",
        "b",
        "d",
        "d/file3",
        "d/file4",
    ];

    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });

    // Entry ids survive renames, including a rename of a parent directory.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        // file5 was removed from disk, so its buffer reports a deleted file.
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            expected_paths
        );
    });
}
3316
// Verifies that renaming a directory through the project preserves the
// entry ids of the directory and its descendants, and keeps an open buffer
// inside it clean and attached.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new("/dir")], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory `a` to `b` via the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // The directory and the file keep their original entry ids, and the
    // open buffer is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3368
3369#[gpui::test]
3370async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3371 init_test(cx);
3372
3373 let fs = FakeFs::new(cx.executor());
3374 fs.insert_tree(
3375 "/dir",
3376 json!({
3377 "a.txt": "a-contents",
3378 "b.txt": "b-contents",
3379 }),
3380 )
3381 .await;
3382
3383 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3384
3385 // Spawn multiple tasks to open paths, repeating some paths.
3386 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3387 (
3388 p.open_local_buffer("/dir/a.txt", cx),
3389 p.open_local_buffer("/dir/b.txt", cx),
3390 p.open_local_buffer("/dir/a.txt", cx),
3391 )
3392 });
3393
3394 let buffer_a_1 = buffer_a_1.await.unwrap();
3395 let buffer_a_2 = buffer_a_2.await.unwrap();
3396 let buffer_b = buffer_b.await.unwrap();
3397 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3398 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3399
3400 // There is only one buffer per path.
3401 let buffer_a_id = buffer_a_1.entity_id();
3402 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3403
3404 // Open the same path again while it is still open.
3405 drop(buffer_a_1);
3406 let buffer_a_3 = project
3407 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3408 .await
3409 .unwrap();
3410
3411 // There's still only one buffer per path.
3412 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3413}
3414
// Exercises dirty-state tracking and the buffer event stream (Edited,
// DirtyChanged, Saved, FileHandleChanged) across edits, saves, manual
// reverts, and on-disk deletion of the backing file.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
        .await
        .unwrap();
    // Accumulates every observed buffer event for later assertions.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record all events except Operation, which fires on every edit and
        // would drown out the state-change events under test.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: DirtyChanged fires only on the first edit after the save;
        // the second edit produces only Edited.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    // Reverting to the saved text emits Edited plus a DirtyChanged back to clean.
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file("/dir/file2".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/file3", cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    // Dirty the buffer first, then clear recorded events before deleting.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file("/dir/file3".as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3565
// When the backing file changes on disk: a clean buffer is reloaded in place
// (preserving anchors via a diff-based edit), while a dirty buffer keeps its
// edits and is flagged as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let initial_contents = "aaa\nbbbbb\nc\n";
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/dir/the-file", cx))
        .await
        .unwrap();

    // Anchors at column 1 of each of the three original lines; used below to
    // verify that the reload preserves anchor positions through the diff.
    let anchors = (0..3)
        .map(|row| buffer.update(cx, |b, _| b.anchor_before(Point::new(row, 1))))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });
    let new_contents = "AAAA\naaa\nBB\nbbbbb\n";
    fs.save(
        "/dir/the-file".as_ref(),
        &new_contents.into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors still point at the surviving original lines ("aaa" and
        // "bbbbb"); the anchor on the deleted "c" line lands at its edit site.
        let anchor_positions = anchors
            .iter()
            .map(|anchor| anchor.to_point(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(
            anchor_positions,
            [Point::new(1, 1), Point::new(3, 1), Point::new(3, 5)]
        );
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        "/dir/the-file".as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert!(buffer.has_conflict());
    });
}
3646
3647#[gpui::test]
3648async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
3649 init_test(cx);
3650
3651 let fs = FakeFs::new(cx.executor());
3652 fs.insert_tree(
3653 "/dir",
3654 json!({
3655 "file1": "a\nb\nc\n",
3656 "file2": "one\r\ntwo\r\nthree\r\n",
3657 }),
3658 )
3659 .await;
3660
3661 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3662 let buffer1 = project
3663 .update(cx, |p, cx| p.open_local_buffer("/dir/file1", cx))
3664 .await
3665 .unwrap();
3666 let buffer2 = project
3667 .update(cx, |p, cx| p.open_local_buffer("/dir/file2", cx))
3668 .await
3669 .unwrap();
3670
3671 buffer1.update(cx, |buffer, _| {
3672 assert_eq!(buffer.text(), "a\nb\nc\n");
3673 assert_eq!(buffer.line_ending(), LineEnding::Unix);
3674 });
3675 buffer2.update(cx, |buffer, _| {
3676 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
3677 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3678 });
3679
3680 // Change a file's line endings on disk from unix to windows. The buffer's
3681 // state updates correctly.
3682 fs.save(
3683 "/dir/file1".as_ref(),
3684 &"aaa\nb\nc\n".into(),
3685 LineEnding::Windows,
3686 )
3687 .await
3688 .unwrap();
3689 cx.executor().run_until_parked();
3690 buffer1.update(cx, |buffer, _| {
3691 assert_eq!(buffer.text(), "aaa\nb\nc\n");
3692 assert_eq!(buffer.line_ending(), LineEnding::Windows);
3693 });
3694
3695 // Save a file with windows line endings. The file is written correctly.
3696 buffer2.update(cx, |buffer, cx| {
3697 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
3698 });
3699 project
3700 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
3701 .await
3702 .unwrap();
3703 assert_eq!(
3704 fs.load("/dir/file2".as_ref()).await.unwrap(),
3705 "one\r\ntwo\r\nthree\r\nfour\r\n",
3706 );
3707}
3708
// Publishes LSP diagnostics whose hints mirror a primary diagnostic's
// `related_information`, then verifies they are clustered into groups:
// hint entries share the primary's group id, `diagnostics_in_range` returns
// everything in range order, and `diagnostic_group` filters by group.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/the-dir",
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/the-dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer("/the-dir/a.rs", cx))
        .await
        .unwrap();

    let buffer_uri = Url::from_file_path("/the-dir/a.rs").unwrap();
    // Simulated `textDocument/publishDiagnostics` payload with two logical
    // groups: "error 1" (warning + one hint) and "error 2" (error + two
    // hints), linked to each other through `related_information`.
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group "error 1": warning pointing at its hint site.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1"; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group "error 2": error with two related hint sites.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint belonging to "error 2"; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint belonging to "error 2"; points back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by range; "error 2" became group 0 and
    // "error 1" became group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 in isolation: both hints plus the primary "error 2" entry.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 in isolation: the primary "error 1" warning and its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
3951
// Verifies that renaming a worktree entry drives the LSP file-operation
// protocol: the server's registered filters are honored, a
// `workspace/willRenameFiles` request is sent (and its returned workspace
// edit resolved) before the rename, and `workspace/didRenameFiles` is
// notified afterwards.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Server registers interest in *.rs files and all folders for rename
    // operations; the rename below matches the file filter.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename one.rs -> three.rs; it blocks on willRenameFiles.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // Workspace edit the server will return from willRenameFiles; the
    // project is expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str("file:///dir/two/two.rs").unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set from the request handler so we can assert it was actually sent.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .handle_request::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
                    assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, "file:///dir/one.rs");
            assert_eq!(params.files[0].new_uri, "file:///dir/three.rs");
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4080
// Exercises symbol rename via LSP: `prepare_rename` resolves the renameable
// range, then `perform_rename` applies a multi-file workspace edit returned
// by the server.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Server advertises rename support including prepareRename.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp("/dir/one.rs", cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") should yield the symbol's
    // full range as reported by the server.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .handle_request::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the fake server responds with edits in both one.rs
    // (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .handle_request::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                "file:///dir/one.rs"
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path("/dir/one.rs").unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path("/dir/two.rs").unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both buffers, with all edits applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4217
4218#[gpui::test]
4219async fn test_search(cx: &mut gpui::TestAppContext) {
4220 init_test(cx);
4221
4222 let fs = FakeFs::new(cx.executor());
4223 fs.insert_tree(
4224 "/dir",
4225 json!({
4226 "one.rs": "const ONE: usize = 1;",
4227 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
4228 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
4229 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
4230 }),
4231 )
4232 .await;
4233 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4234 assert_eq!(
4235 search(
4236 &project,
4237 SearchQuery::text(
4238 "TWO",
4239 false,
4240 true,
4241 false,
4242 Default::default(),
4243 Default::default(),
4244 None
4245 )
4246 .unwrap(),
4247 cx
4248 )
4249 .await
4250 .unwrap(),
4251 HashMap::from_iter([
4252 ("dir/two.rs".to_string(), vec![6..9]),
4253 ("dir/three.rs".to_string(), vec![37..40])
4254 ])
4255 );
4256
4257 let buffer_4 = project
4258 .update(cx, |project, cx| {
4259 project.open_local_buffer("/dir/four.rs", cx)
4260 })
4261 .await
4262 .unwrap();
4263 buffer_4.update(cx, |buffer, cx| {
4264 let text = "two::TWO";
4265 buffer.edit([(20..28, text), (31..43, text)], None, cx);
4266 });
4267
4268 assert_eq!(
4269 search(
4270 &project,
4271 SearchQuery::text(
4272 "TWO",
4273 false,
4274 true,
4275 false,
4276 Default::default(),
4277 Default::default(),
4278 None,
4279 )
4280 .unwrap(),
4281 cx
4282 )
4283 .await
4284 .unwrap(),
4285 HashMap::from_iter([
4286 ("dir/two.rs".to_string(), vec![6..9]),
4287 ("dir/three.rs".to_string(), vec![37..40]),
4288 ("dir/four.rs".to_string(), vec![25..28, 36..39])
4289 ])
4290 );
4291}
4292
4293#[gpui::test]
4294async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4295 init_test(cx);
4296
4297 let search_query = "file";
4298
4299 let fs = FakeFs::new(cx.executor());
4300 fs.insert_tree(
4301 "/dir",
4302 json!({
4303 "one.rs": r#"// Rust file one"#,
4304 "one.ts": r#"// TypeScript file one"#,
4305 "two.rs": r#"// Rust file two"#,
4306 "two.ts": r#"// TypeScript file two"#,
4307 }),
4308 )
4309 .await;
4310 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4311
4312 assert!(
4313 search(
4314 &project,
4315 SearchQuery::text(
4316 search_query,
4317 false,
4318 true,
4319 false,
4320 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4321 Default::default(),
4322 None
4323 )
4324 .unwrap(),
4325 cx
4326 )
4327 .await
4328 .unwrap()
4329 .is_empty(),
4330 "If no inclusions match, no files should be returned"
4331 );
4332
4333 assert_eq!(
4334 search(
4335 &project,
4336 SearchQuery::text(
4337 search_query,
4338 false,
4339 true,
4340 false,
4341 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4342 Default::default(),
4343 None
4344 )
4345 .unwrap(),
4346 cx
4347 )
4348 .await
4349 .unwrap(),
4350 HashMap::from_iter([
4351 ("dir/one.rs".to_string(), vec![8..12]),
4352 ("dir/two.rs".to_string(), vec![8..12]),
4353 ]),
4354 "Rust only search should give only Rust files"
4355 );
4356
4357 assert_eq!(
4358 search(
4359 &project,
4360 SearchQuery::text(
4361 search_query,
4362 false,
4363 true,
4364 false,
4365
4366 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4367
4368 Default::default(),
4369 None,
4370 ).unwrap(),
4371 cx
4372 )
4373 .await
4374 .unwrap(),
4375 HashMap::from_iter([
4376 ("dir/one.ts".to_string(), vec![14..18]),
4377 ("dir/two.ts".to_string(), vec![14..18]),
4378 ]),
4379 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4380 );
4381
4382 assert_eq!(
4383 search(
4384 &project,
4385 SearchQuery::text(
4386 search_query,
4387 false,
4388 true,
4389 false,
4390
4391 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4392
4393 Default::default(),
4394 None,
4395 ).unwrap(),
4396 cx
4397 )
4398 .await
4399 .unwrap(),
4400 HashMap::from_iter([
4401 ("dir/two.ts".to_string(), vec![14..18]),
4402 ("dir/one.rs".to_string(), vec![8..12]),
4403 ("dir/one.ts".to_string(), vec![14..18]),
4404 ("dir/two.rs".to_string(), vec![8..12]),
4405 ]),
4406 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4407 );
4408}
4409
4410#[gpui::test]
4411async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4412 init_test(cx);
4413
4414 let search_query = "file";
4415
4416 let fs = FakeFs::new(cx.executor());
4417 fs.insert_tree(
4418 "/dir",
4419 json!({
4420 "one.rs": r#"// Rust file one"#,
4421 "one.ts": r#"// TypeScript file one"#,
4422 "two.rs": r#"// Rust file two"#,
4423 "two.ts": r#"// TypeScript file two"#,
4424 }),
4425 )
4426 .await;
4427 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4428
4429 assert_eq!(
4430 search(
4431 &project,
4432 SearchQuery::text(
4433 search_query,
4434 false,
4435 true,
4436 false,
4437 Default::default(),
4438 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4439 None,
4440 )
4441 .unwrap(),
4442 cx
4443 )
4444 .await
4445 .unwrap(),
4446 HashMap::from_iter([
4447 ("dir/one.rs".to_string(), vec![8..12]),
4448 ("dir/one.ts".to_string(), vec![14..18]),
4449 ("dir/two.rs".to_string(), vec![8..12]),
4450 ("dir/two.ts".to_string(), vec![14..18]),
4451 ]),
4452 "If no exclusions match, all files should be returned"
4453 );
4454
4455 assert_eq!(
4456 search(
4457 &project,
4458 SearchQuery::text(
4459 search_query,
4460 false,
4461 true,
4462 false,
4463 Default::default(),
4464 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4465 None,
4466 )
4467 .unwrap(),
4468 cx
4469 )
4470 .await
4471 .unwrap(),
4472 HashMap::from_iter([
4473 ("dir/one.ts".to_string(), vec![14..18]),
4474 ("dir/two.ts".to_string(), vec![14..18]),
4475 ]),
4476 "Rust exclusion search should give only TypeScript files"
4477 );
4478
4479 assert_eq!(
4480 search(
4481 &project,
4482 SearchQuery::text(
4483 search_query,
4484 false,
4485 true,
4486 false,
4487 Default::default(),
4488 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4489 None,
4490 ).unwrap(),
4491 cx
4492 )
4493 .await
4494 .unwrap(),
4495 HashMap::from_iter([
4496 ("dir/one.rs".to_string(), vec![8..12]),
4497 ("dir/two.rs".to_string(), vec![8..12]),
4498 ]),
4499 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4500 );
4501
4502 assert!(
4503 search(
4504 &project,
4505 SearchQuery::text(
4506 search_query,
4507 false,
4508 true,
4509 false,
4510 Default::default(),
4511
4512 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4513 None,
4514
4515 ).unwrap(),
4516 cx
4517 )
4518 .await
4519 .unwrap().is_empty(),
4520 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4521 );
4522}
4523
4524#[gpui::test]
4525async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4526 init_test(cx);
4527
4528 let search_query = "file";
4529
4530 let fs = FakeFs::new(cx.executor());
4531 fs.insert_tree(
4532 "/dir",
4533 json!({
4534 "one.rs": r#"// Rust file one"#,
4535 "one.ts": r#"// TypeScript file one"#,
4536 "two.rs": r#"// Rust file two"#,
4537 "two.ts": r#"// TypeScript file two"#,
4538 }),
4539 )
4540 .await;
4541 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4542
4543 assert!(
4544 search(
4545 &project,
4546 SearchQuery::text(
4547 search_query,
4548 false,
4549 true,
4550 false,
4551 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4552 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4553 None,
4554 )
4555 .unwrap(),
4556 cx
4557 )
4558 .await
4559 .unwrap()
4560 .is_empty(),
4561 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4562 );
4563
4564 assert!(
4565 search(
4566 &project,
4567 SearchQuery::text(
4568 search_query,
4569 false,
4570 true,
4571 false,
4572 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4573 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4574 None,
4575 ).unwrap(),
4576 cx
4577 )
4578 .await
4579 .unwrap()
4580 .is_empty(),
4581 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4582 );
4583
4584 assert!(
4585 search(
4586 &project,
4587 SearchQuery::text(
4588 search_query,
4589 false,
4590 true,
4591 false,
4592 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4593 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4594 None,
4595 )
4596 .unwrap(),
4597 cx
4598 )
4599 .await
4600 .unwrap()
4601 .is_empty(),
4602 "Non-matching inclusions and exclusions should not change that."
4603 );
4604
4605 assert_eq!(
4606 search(
4607 &project,
4608 SearchQuery::text(
4609 search_query,
4610 false,
4611 true,
4612 false,
4613 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4614 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4615 None,
4616 )
4617 .unwrap(),
4618 cx
4619 )
4620 .await
4621 .unwrap(),
4622 HashMap::from_iter([
4623 ("dir/one.ts".to_string(), vec![14..18]),
4624 ("dir/two.ts".to_string(), vec![14..18]),
4625 ]),
4626 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
4627 );
4628}
4629
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees with identical file names, so inclusion globs are the
    // only thing distinguishing which results come back.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/worktree-a",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        "/worktree-b",
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        ["/worktree-a".as_ref(), "/worktree-b".as_ref()],
        cx,
    )
    .await;

    // An inclusion glob prefixed with a worktree name restricts the search to
    // that worktree only.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-a/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same check against the second worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("worktree-b/haystack.rs".to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // An inclusion glob with no worktree prefix applies across all worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("worktree-a/haystack.ts".to_string(), vec![3..9]),
            ("worktree-b/haystack.ts".to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
4724
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A repo where `target/` and `/node_modules` are gitignored; every file
    // body contains the substring "key".
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let query = "key";
    // Default search (fourth flag false) skips gitignored entries — only the
    // top-level package.json matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([("dir/package.json".to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project with the fourth flag (search ignored entries) set to true:
    // ignored directories are searched as well.
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            ("dir/package.json".to_string(), vec![8..11]),
            ("dir/target/index.txt".to_string(), vec![6..9]),
            (
                "dir/node_modules/prettier/package.json".to_string(),
                vec![9..12]
            ),
            (
                "dir/node_modules/prettier/index.ts".to_string(),
                vec![15..18]
            ),
            ("dir/node_modules/eslint/index.ts".to_string(), vec![13..16]),
            (
                "dir/node_modules/eslint/package.json".to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion/exclusion globs still apply when searching ignored entries.
    let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            "dir/node_modules/prettier/package.json".to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
4841
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor().clone());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is nested inside a larger fake-fs tree, so there are
    // real files both inside and outside the project.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // A file name that merely contains dots ("b..") is legal and gets created.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .to_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the "b.." entry was actually added to the filesystem; the rejected
    // paths left no trace.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from("/"),
            PathBuf::from("/one"),
            PathBuf::from("/one/two"),
            PathBuf::from("/one/two/c.rs"),
            PathBuf::from("/one/two/three"),
            PathBuf::from("/one/two/three/a.txt"),
            PathBuf::from("/one/two/three/b.."),
            PathBuf::from("/one/two/three/four"),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
4911
4912#[gpui::test]
4913async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
4914 init_test(cx);
4915
4916 let fs = FakeFs::new(cx.executor());
4917 fs.insert_tree(
4918 "/dir",
4919 json!({
4920 "a.tsx": "a",
4921 }),
4922 )
4923 .await;
4924
4925 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
4926
4927 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
4928 language_registry.add(tsx_lang());
4929 let language_server_names = [
4930 "TypeScriptServer",
4931 "TailwindServer",
4932 "ESLintServer",
4933 "NoHoverCapabilitiesServer",
4934 ];
4935 let mut language_servers = [
4936 language_registry.register_fake_lsp(
4937 "tsx",
4938 FakeLspAdapter {
4939 name: language_server_names[0],
4940 capabilities: lsp::ServerCapabilities {
4941 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4942 ..lsp::ServerCapabilities::default()
4943 },
4944 ..FakeLspAdapter::default()
4945 },
4946 ),
4947 language_registry.register_fake_lsp(
4948 "tsx",
4949 FakeLspAdapter {
4950 name: language_server_names[1],
4951 capabilities: lsp::ServerCapabilities {
4952 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4953 ..lsp::ServerCapabilities::default()
4954 },
4955 ..FakeLspAdapter::default()
4956 },
4957 ),
4958 language_registry.register_fake_lsp(
4959 "tsx",
4960 FakeLspAdapter {
4961 name: language_server_names[2],
4962 capabilities: lsp::ServerCapabilities {
4963 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
4964 ..lsp::ServerCapabilities::default()
4965 },
4966 ..FakeLspAdapter::default()
4967 },
4968 ),
4969 language_registry.register_fake_lsp(
4970 "tsx",
4971 FakeLspAdapter {
4972 name: language_server_names[3],
4973 capabilities: lsp::ServerCapabilities {
4974 hover_provider: None,
4975 ..lsp::ServerCapabilities::default()
4976 },
4977 ..FakeLspAdapter::default()
4978 },
4979 ),
4980 ];
4981
4982 let (buffer, _handle) = project
4983 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
4984 .await
4985 .unwrap();
4986 cx.executor().run_until_parked();
4987
4988 let mut servers_with_hover_requests = HashMap::default();
4989 for i in 0..language_server_names.len() {
4990 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
4991 panic!(
4992 "Failed to get language server #{i} with name {}",
4993 &language_server_names[i]
4994 )
4995 });
4996 let new_server_name = new_server.server.name();
4997 assert!(
4998 !servers_with_hover_requests.contains_key(&new_server_name),
4999 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5000 );
5001 match new_server_name.as_ref() {
5002 "TailwindServer" | "TypeScriptServer" => {
5003 servers_with_hover_requests.insert(
5004 new_server_name.clone(),
5005 new_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| {
5006 let name = new_server_name.clone();
5007 async move {
5008 Ok(Some(lsp::Hover {
5009 contents: lsp::HoverContents::Scalar(lsp::MarkedString::String(
5010 format!("{name} hover"),
5011 )),
5012 range: None,
5013 }))
5014 }
5015 }),
5016 );
5017 }
5018 "ESLintServer" => {
5019 servers_with_hover_requests.insert(
5020 new_server_name,
5021 new_server.handle_request::<lsp::request::HoverRequest, _, _>(
5022 |_, _| async move { Ok(None) },
5023 ),
5024 );
5025 }
5026 "NoHoverCapabilitiesServer" => {
5027 let _never_handled = new_server.handle_request::<lsp::request::HoverRequest, _, _>(
5028 |_, _| async move {
5029 panic!(
5030 "Should not call for hovers server with no corresponding capabilities"
5031 )
5032 },
5033 );
5034 }
5035 unexpected => panic!("Unexpected server name: {unexpected}"),
5036 }
5037 }
5038
5039 let hover_task = project.update(cx, |project, cx| {
5040 project.hover(&buffer, Point::new(0, 0), cx)
5041 });
5042 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
5043 |mut hover_request| async move {
5044 hover_request
5045 .next()
5046 .await
5047 .expect("All hover requests should have been triggered")
5048 },
5049 ))
5050 .await;
5051 assert_eq!(
5052 vec!["TailwindServer hover", "TypeScriptServer hover"],
5053 hover_task
5054 .await
5055 .into_iter()
5056 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5057 .sorted()
5058 .collect::<Vec<_>>(),
5059 "Should receive hover responses from all related servers with hover capabilities"
5060 );
5061}
5062
5063#[gpui::test]
5064async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
5065 init_test(cx);
5066
5067 let fs = FakeFs::new(cx.executor());
5068 fs.insert_tree(
5069 "/dir",
5070 json!({
5071 "a.ts": "a",
5072 }),
5073 )
5074 .await;
5075
5076 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5077
5078 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5079 language_registry.add(typescript_lang());
5080 let mut fake_language_servers = language_registry.register_fake_lsp(
5081 "TypeScript",
5082 FakeLspAdapter {
5083 capabilities: lsp::ServerCapabilities {
5084 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
5085 ..lsp::ServerCapabilities::default()
5086 },
5087 ..FakeLspAdapter::default()
5088 },
5089 );
5090
5091 let (buffer, _handle) = project
5092 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
5093 .await
5094 .unwrap();
5095 cx.executor().run_until_parked();
5096
5097 let fake_server = fake_language_servers
5098 .next()
5099 .await
5100 .expect("failed to get the language server");
5101
5102 let mut request_handled =
5103 fake_server.handle_request::<lsp::request::HoverRequest, _, _>(move |_, _| async move {
5104 Ok(Some(lsp::Hover {
5105 contents: lsp::HoverContents::Array(vec![
5106 lsp::MarkedString::String("".to_string()),
5107 lsp::MarkedString::String(" ".to_string()),
5108 lsp::MarkedString::String("\n\n\n".to_string()),
5109 ]),
5110 range: None,
5111 }))
5112 });
5113
5114 let hover_task = project.update(cx, |project, cx| {
5115 project.hover(&buffer, Point::new(0, 0), cx)
5116 });
5117 let () = request_handled
5118 .next()
5119 .await
5120 .expect("All hover requests should have been triggered");
5121 assert_eq!(
5122 Vec::<String>::new(),
5123 hover_task
5124 .await
5125 .into_iter()
5126 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
5127 .sorted()
5128 .collect::<Vec<_>>(),
5129 "Empty hover parts should be ignored"
5130 );
5131}
5132
5133#[gpui::test]
5134async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
5135 init_test(cx);
5136
5137 let fs = FakeFs::new(cx.executor());
5138 fs.insert_tree(
5139 "/dir",
5140 json!({
5141 "a.ts": "a",
5142 }),
5143 )
5144 .await;
5145
5146 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5147
5148 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5149 language_registry.add(typescript_lang());
5150 let mut fake_language_servers = language_registry.register_fake_lsp(
5151 "TypeScript",
5152 FakeLspAdapter {
5153 capabilities: lsp::ServerCapabilities {
5154 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5155 ..lsp::ServerCapabilities::default()
5156 },
5157 ..FakeLspAdapter::default()
5158 },
5159 );
5160
5161 let (buffer, _handle) = project
5162 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.ts", cx))
5163 .await
5164 .unwrap();
5165 cx.executor().run_until_parked();
5166
5167 let fake_server = fake_language_servers
5168 .next()
5169 .await
5170 .expect("failed to get the language server");
5171
5172 let mut request_handled = fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5173 move |_, _| async move {
5174 Ok(Some(vec![
5175 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5176 title: "organize imports".to_string(),
5177 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
5178 ..lsp::CodeAction::default()
5179 }),
5180 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
5181 title: "fix code".to_string(),
5182 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
5183 ..lsp::CodeAction::default()
5184 }),
5185 ]))
5186 },
5187 );
5188
5189 let code_actions_task = project.update(cx, |project, cx| {
5190 project.code_actions(
5191 &buffer,
5192 0..buffer.read(cx).len(),
5193 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
5194 cx,
5195 )
5196 });
5197
5198 let () = request_handled
5199 .next()
5200 .await
5201 .expect("The code action request should have been triggered");
5202
5203 let code_actions = code_actions_task.await.unwrap();
5204 assert_eq!(code_actions.len(), 1);
5205 assert_eq!(
5206 code_actions[0].lsp_action.kind,
5207 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
5208 );
5209}
5210
5211#[gpui::test]
5212async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5213 init_test(cx);
5214
5215 let fs = FakeFs::new(cx.executor());
5216 fs.insert_tree(
5217 "/dir",
5218 json!({
5219 "a.tsx": "a",
5220 }),
5221 )
5222 .await;
5223
5224 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
5225
5226 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5227 language_registry.add(tsx_lang());
5228 let language_server_names = [
5229 "TypeScriptServer",
5230 "TailwindServer",
5231 "ESLintServer",
5232 "NoActionsCapabilitiesServer",
5233 ];
5234
5235 let mut language_server_rxs = [
5236 language_registry.register_fake_lsp(
5237 "tsx",
5238 FakeLspAdapter {
5239 name: language_server_names[0],
5240 capabilities: lsp::ServerCapabilities {
5241 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5242 ..lsp::ServerCapabilities::default()
5243 },
5244 ..FakeLspAdapter::default()
5245 },
5246 ),
5247 language_registry.register_fake_lsp(
5248 "tsx",
5249 FakeLspAdapter {
5250 name: language_server_names[1],
5251 capabilities: lsp::ServerCapabilities {
5252 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5253 ..lsp::ServerCapabilities::default()
5254 },
5255 ..FakeLspAdapter::default()
5256 },
5257 ),
5258 language_registry.register_fake_lsp(
5259 "tsx",
5260 FakeLspAdapter {
5261 name: language_server_names[2],
5262 capabilities: lsp::ServerCapabilities {
5263 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5264 ..lsp::ServerCapabilities::default()
5265 },
5266 ..FakeLspAdapter::default()
5267 },
5268 ),
5269 language_registry.register_fake_lsp(
5270 "tsx",
5271 FakeLspAdapter {
5272 name: language_server_names[3],
5273 capabilities: lsp::ServerCapabilities {
5274 code_action_provider: None,
5275 ..lsp::ServerCapabilities::default()
5276 },
5277 ..FakeLspAdapter::default()
5278 },
5279 ),
5280 ];
5281
5282 let (buffer, _handle) = project
5283 .update(cx, |p, cx| p.open_local_buffer_with_lsp("/dir/a.tsx", cx))
5284 .await
5285 .unwrap();
5286 cx.executor().run_until_parked();
5287
5288 let mut servers_with_actions_requests = HashMap::default();
5289 for i in 0..language_server_names.len() {
5290 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5291 panic!(
5292 "Failed to get language server #{i} with name {}",
5293 &language_server_names[i]
5294 )
5295 });
5296 let new_server_name = new_server.server.name();
5297
5298 assert!(
5299 !servers_with_actions_requests.contains_key(&new_server_name),
5300 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5301 );
5302 match new_server_name.0.as_ref() {
5303 "TailwindServer" | "TypeScriptServer" => {
5304 servers_with_actions_requests.insert(
5305 new_server_name.clone(),
5306 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5307 move |_, _| {
5308 let name = new_server_name.clone();
5309 async move {
5310 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5311 lsp::CodeAction {
5312 title: format!("{name} code action"),
5313 ..lsp::CodeAction::default()
5314 },
5315 )]))
5316 }
5317 },
5318 ),
5319 );
5320 }
5321 "ESLintServer" => {
5322 servers_with_actions_requests.insert(
5323 new_server_name,
5324 new_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
5325 |_, _| async move { Ok(None) },
5326 ),
5327 );
5328 }
5329 "NoActionsCapabilitiesServer" => {
5330 let _never_handled = new_server
5331 .handle_request::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5332 panic!(
5333 "Should not call for code actions server with no corresponding capabilities"
5334 )
5335 });
5336 }
5337 unexpected => panic!("Unexpected server name: {unexpected}"),
5338 }
5339 }
5340
5341 let code_actions_task = project.update(cx, |project, cx| {
5342 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5343 });
5344
5345 // cx.run_until_parked();
5346 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5347 |mut code_actions_request| async move {
5348 code_actions_request
5349 .next()
5350 .await
5351 .expect("All code actions requests should have been triggered")
5352 },
5353 ))
5354 .await;
5355 assert_eq!(
5356 vec!["TailwindServer code action", "TypeScriptServer code action"],
5357 code_actions_task
5358 .await
5359 .unwrap()
5360 .into_iter()
5361 .map(|code_action| code_action.lsp_action.title)
5362 .sorted()
5363 .collect::<Vec<_>>(),
5364 "Should receive code actions responses from all related servers with hover capabilities"
5365 );
5366}
5367
5368#[gpui::test]
5369async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5370 init_test(cx);
5371
5372 let fs = FakeFs::new(cx.executor());
5373 fs.insert_tree(
5374 "/dir",
5375 json!({
5376 "a.rs": "let a = 1;",
5377 "b.rs": "let b = 2;",
5378 "c.rs": "let c = 2;",
5379 }),
5380 )
5381 .await;
5382
5383 let project = Project::test(
5384 fs,
5385 [
5386 "/dir/a.rs".as_ref(),
5387 "/dir/b.rs".as_ref(),
5388 "/dir/c.rs".as_ref(),
5389 ],
5390 cx,
5391 )
5392 .await;
5393
5394 // check the initial state and get the worktrees
5395 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5396 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5397 assert_eq!(worktrees.len(), 3);
5398
5399 let worktree_a = worktrees[0].read(cx);
5400 let worktree_b = worktrees[1].read(cx);
5401 let worktree_c = worktrees[2].read(cx);
5402
5403 // check they start in the right order
5404 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5405 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5406 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5407
5408 (
5409 worktrees[0].clone(),
5410 worktrees[1].clone(),
5411 worktrees[2].clone(),
5412 )
5413 });
5414
5415 // move first worktree to after the second
5416 // [a, b, c] -> [b, a, c]
5417 project
5418 .update(cx, |project, cx| {
5419 let first = worktree_a.read(cx);
5420 let second = worktree_b.read(cx);
5421 project.move_worktree(first.id(), second.id(), cx)
5422 })
5423 .expect("moving first after second");
5424
5425 // check the state after moving
5426 project.update(cx, |project, cx| {
5427 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5428 assert_eq!(worktrees.len(), 3);
5429
5430 let first = worktrees[0].read(cx);
5431 let second = worktrees[1].read(cx);
5432 let third = worktrees[2].read(cx);
5433
5434 // check they are now in the right order
5435 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5436 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5437 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5438 });
5439
5440 // move the second worktree to before the first
5441 // [b, a, c] -> [a, b, c]
5442 project
5443 .update(cx, |project, cx| {
5444 let second = worktree_a.read(cx);
5445 let first = worktree_b.read(cx);
5446 project.move_worktree(first.id(), second.id(), cx)
5447 })
5448 .expect("moving second before first");
5449
5450 // check the state after moving
5451 project.update(cx, |project, cx| {
5452 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5453 assert_eq!(worktrees.len(), 3);
5454
5455 let first = worktrees[0].read(cx);
5456 let second = worktrees[1].read(cx);
5457 let third = worktrees[2].read(cx);
5458
5459 // check they are now in the right order
5460 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5461 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5462 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5463 });
5464
5465 // move the second worktree to after the third
5466 // [a, b, c] -> [a, c, b]
5467 project
5468 .update(cx, |project, cx| {
5469 let second = worktree_b.read(cx);
5470 let third = worktree_c.read(cx);
5471 project.move_worktree(second.id(), third.id(), cx)
5472 })
5473 .expect("moving second after third");
5474
5475 // check the state after moving
5476 project.update(cx, |project, cx| {
5477 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5478 assert_eq!(worktrees.len(), 3);
5479
5480 let first = worktrees[0].read(cx);
5481 let second = worktrees[1].read(cx);
5482 let third = worktrees[2].read(cx);
5483
5484 // check they are now in the right order
5485 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5486 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5487 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5488 });
5489
5490 // move the third worktree to before the second
5491 // [a, c, b] -> [a, b, c]
5492 project
5493 .update(cx, |project, cx| {
5494 let third = worktree_c.read(cx);
5495 let second = worktree_b.read(cx);
5496 project.move_worktree(third.id(), second.id(), cx)
5497 })
5498 .expect("moving third before second");
5499
5500 // check the state after moving
5501 project.update(cx, |project, cx| {
5502 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5503 assert_eq!(worktrees.len(), 3);
5504
5505 let first = worktrees[0].read(cx);
5506 let second = worktrees[1].read(cx);
5507 let third = worktrees[2].read(cx);
5508
5509 // check they are now in the right order
5510 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5511 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5512 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5513 });
5514
5515 // move the first worktree to after the third
5516 // [a, b, c] -> [b, c, a]
5517 project
5518 .update(cx, |project, cx| {
5519 let first = worktree_a.read(cx);
5520 let third = worktree_c.read(cx);
5521 project.move_worktree(first.id(), third.id(), cx)
5522 })
5523 .expect("moving first after third");
5524
5525 // check the state after moving
5526 project.update(cx, |project, cx| {
5527 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5528 assert_eq!(worktrees.len(), 3);
5529
5530 let first = worktrees[0].read(cx);
5531 let second = worktrees[1].read(cx);
5532 let third = worktrees[2].read(cx);
5533
5534 // check they are now in the right order
5535 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5536 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5537 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5538 });
5539
5540 // move the third worktree to before the first
5541 // [b, c, a] -> [a, b, c]
5542 project
5543 .update(cx, |project, cx| {
5544 let third = worktree_a.read(cx);
5545 let first = worktree_b.read(cx);
5546 project.move_worktree(third.id(), first.id(), cx)
5547 })
5548 .expect("moving third before first");
5549
5550 // check the state after moving
5551 project.update(cx, |project, cx| {
5552 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5553 assert_eq!(worktrees.len(), 3);
5554
5555 let first = worktrees[0].read(cx);
5556 let second = worktrees[1].read(cx);
5557 let third = worktrees[2].read(cx);
5558
5559 // check they are now in the right order
5560 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5561 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5562 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5563 });
5564}
5565
#[gpui::test]
async fn test_unstaged_changes_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one line added, one line changed.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_changes = project
        .update(cx, |project, cx| {
            project.open_unstaged_changes(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The diff against the index shows the added comment line and the
    // modified println line.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[
                (0..1, "", "// print goodbye\n"),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                ),
            ],
        );
    });

    // Re-stage with the comment included but the println removed.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[(Path::new("src/main.rs"), staged_contents)],
    );

    // The diff recomputes: only the println line remains unstaged.
    cx.run_until_parked();
    unstaged_changes.update(cx, |unstaged_changes, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_changes.diff_hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_changes.base_text.as_ref().unwrap().read(cx).text(),
            &[(2..3, "", " println!(\"goodbye world\");\n")],
        );
    });
}
5657
5658async fn search(
5659 project: &Model<Project>,
5660 query: SearchQuery,
5661 cx: &mut gpui::TestAppContext,
5662) -> Result<HashMap<String, Vec<Range<usize>>>> {
5663 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
5664 let mut results = HashMap::default();
5665 while let Ok(search_result) = search_rx.recv().await {
5666 match search_result {
5667 SearchResult::Buffer { buffer, ranges } => {
5668 results.entry(buffer).or_insert(ranges);
5669 }
5670 SearchResult::LimitReached => {}
5671 }
5672 }
5673 Ok(results
5674 .into_iter()
5675 .map(|(buffer, ranges)| {
5676 buffer.update(cx, |buffer, cx| {
5677 let path = buffer
5678 .file()
5679 .unwrap()
5680 .full_path(cx)
5681 .to_string_lossy()
5682 .to_string();
5683 let ranges = ranges
5684 .into_iter()
5685 .map(|range| range.to_offset(buffer))
5686 .collect::<Vec<_>>();
5687 (path, ranges)
5688 })
5689 })
5690 .collect())
5691}
5692
5693pub fn init_test(cx: &mut gpui::TestAppContext) {
5694 if std::env::var("RUST_LOG").is_ok() {
5695 env_logger::try_init().ok();
5696 }
5697
5698 cx.update(|cx| {
5699 let settings_store = SettingsStore::test(cx);
5700 cx.set_global(settings_store);
5701 release_channel::init(SemanticVersion::default(), cx);
5702 language::init(cx);
5703 Project::init_settings(cx);
5704 });
5705}
5706
5707fn json_lang() -> Arc<Language> {
5708 Arc::new(Language::new(
5709 LanguageConfig {
5710 name: "JSON".into(),
5711 matcher: LanguageMatcher {
5712 path_suffixes: vec!["json".to_string()],
5713 ..Default::default()
5714 },
5715 ..Default::default()
5716 },
5717 None,
5718 ))
5719}
5720
5721fn js_lang() -> Arc<Language> {
5722 Arc::new(Language::new(
5723 LanguageConfig {
5724 name: "JavaScript".into(),
5725 matcher: LanguageMatcher {
5726 path_suffixes: vec!["js".to_string()],
5727 ..Default::default()
5728 },
5729 ..Default::default()
5730 },
5731 None,
5732 ))
5733}
5734
5735fn rust_lang() -> Arc<Language> {
5736 Arc::new(Language::new(
5737 LanguageConfig {
5738 name: "Rust".into(),
5739 matcher: LanguageMatcher {
5740 path_suffixes: vec!["rs".to_string()],
5741 ..Default::default()
5742 },
5743 ..Default::default()
5744 },
5745 Some(tree_sitter_rust::LANGUAGE.into()),
5746 ))
5747}
5748
5749fn typescript_lang() -> Arc<Language> {
5750 Arc::new(Language::new(
5751 LanguageConfig {
5752 name: "TypeScript".into(),
5753 matcher: LanguageMatcher {
5754 path_suffixes: vec!["ts".to_string()],
5755 ..Default::default()
5756 },
5757 ..Default::default()
5758 },
5759 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
5760 ))
5761}
5762
5763fn tsx_lang() -> Arc<Language> {
5764 Arc::new(Language::new(
5765 LanguageConfig {
5766 name: "tsx".into(),
5767 matcher: LanguageMatcher {
5768 path_suffixes: vec!["tsx".to_string()],
5769 ..Default::default()
5770 },
5771 ..Default::default()
5772 },
5773 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
5774 ))
5775}
5776
5777fn get_all_tasks(
5778 project: &Model<Project>,
5779 worktree_id: Option<WorktreeId>,
5780 task_context: &TaskContext,
5781 cx: &mut AppContext,
5782) -> Vec<(TaskSourceKind, ResolvedTask)> {
5783 let (mut old, new) = project.update(cx, |project, cx| {
5784 project
5785 .task_store
5786 .read(cx)
5787 .task_inventory()
5788 .unwrap()
5789 .read(cx)
5790 .used_and_current_resolved_tasks(worktree_id, None, task_context, cx)
5791 });
5792 old.extend(new);
5793 old
5794}