1#![allow(clippy::format_collect)]
2
3use crate::{task_inventory::TaskContexts, task_store::TaskSettingsLocation, Event, *};
4use buffer_diff::{
5 assert_hunks, BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind,
6};
7use fs::FakeFs;
8use futures::{future, StreamExt};
9use git::repository::RepoPath;
10use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
11use http_client::Url;
12use language::{
13 language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
14 tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticEntry, DiagnosticSet,
15 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
16 OffsetRangeExt, Point, ToPoint,
17};
18use lsp::{
19 notification::DidRenameFiles, DiagnosticSeverity, DocumentChanges, FileOperationFilter,
20 NumberOrString, TextDocumentEdit, WillRenameFiles,
21};
22use parking_lot::Mutex;
23use paths::tasks_file;
24use pretty_assertions::{assert_eq, assert_matches};
25use serde_json::json;
26#[cfg(not(windows))]
27use std::os;
28use std::{mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
29use task::{ResolvedTask, TaskContext};
30use unindent::Unindent as _;
31use util::{
32 assert_set_eq, path,
33 paths::PathMatcher,
34 separator,
35 test::{marked_text_offsets, TempTree},
36 uri, TryFutureExt as _,
37};
38use worktree::WorktreeModelHandle as _;
39
40#[gpui::test]
41async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
42 cx.executor().allow_parking();
43
44 let (tx, mut rx) = futures::channel::mpsc::unbounded();
45 let _thread = std::thread::spawn(move || {
46 #[cfg(not(target_os = "windows"))]
47 std::fs::metadata("/tmp").unwrap();
48 #[cfg(target_os = "windows")]
49 std::fs::metadata("C:/Windows").unwrap();
50 std::thread::sleep(Duration::from_millis(1000));
51 tx.unbounded_send(1).unwrap();
52 });
53 rx.next().await.unwrap();
54}
55
56#[gpui::test]
57async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
58 cx.executor().allow_parking();
59
60 let io_task = smol::unblock(move || {
61 println!("sleeping on thread {:?}", std::thread::current().id());
62 std::thread::sleep(Duration::from_millis(10));
63 1
64 });
65
66 let task = cx.foreground_executor().spawn(async move {
67 io_task.await;
68 });
69
70 task.await;
71}
72
73#[cfg(not(windows))]
74#[gpui::test]
75async fn test_symlinks(cx: &mut gpui::TestAppContext) {
76 init_test(cx);
77 cx.executor().allow_parking();
78
79 let dir = TempTree::new(json!({
80 "root": {
81 "apple": "",
82 "banana": {
83 "carrot": {
84 "date": "",
85 "endive": "",
86 }
87 },
88 "fennel": {
89 "grape": "",
90 }
91 }
92 }));
93
94 let root_link_path = dir.path().join("root_link");
95 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
96 os::unix::fs::symlink(
97 dir.path().join("root/fennel"),
98 dir.path().join("root/finnochio"),
99 )
100 .unwrap();
101
102 let project = Project::test(
103 Arc::new(RealFs::new(None, cx.executor())),
104 [root_link_path.as_ref()],
105 cx,
106 )
107 .await;
108
109 project.update(cx, |project, cx| {
110 let tree = project.worktrees(cx).next().unwrap().read(cx);
111 assert_eq!(tree.file_count(), 5);
112 assert_eq!(
113 tree.inode_for_path("fennel/grape"),
114 tree.inode_for_path("finnochio/grape")
115 );
116 });
117}
118
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies .editorconfig precedence: .editorconfig settings override
    // .zed/settings.json, a nested .editorconfig overrides its parent, and
    // globs only apply to matching file extensions.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        [*.js]
        tab_width = 10
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "soft_wrap": "editor_width"
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp dir into the fake FS so the project can read it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
208
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies that per-directory .zed/settings.json and .zed/tasks.json are
    // picked up, that nested settings override outer ones, and that scheduling
    // a task plus registering global file-based tasks changes task ordering.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));

    // Identifies tasks coming from the worktree root's .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Nested b/.zed/settings.json overrides the root settings for
            // files beneath b/; files elsewhere get the root tab_size.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, &task_contexts, cx)
        })
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the topmost task as recently scheduled and register an additional
    // global (file-based) tasks.json entry.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                    settings::TaskKind::Script,
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, &task_contexts, cx))
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved.unwrap();
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the new global task is last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(separator!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
414
415#[gpui::test]
416async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
417 init_test(cx);
418 TaskStore::init(None);
419
420 let fs = FakeFs::new(cx.executor());
421 fs.insert_tree(
422 path!("/dir"),
423 json!({
424 ".zed": {
425 "tasks.json": r#"[{
426 "label": "test worktree root",
427 "command": "echo $ZED_WORKTREE_ROOT"
428 }]"#,
429 },
430 "a": {
431 "a.rs": "fn a() {\n A\n}"
432 },
433 }),
434 )
435 .await;
436
437 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
438 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
439
440 cx.executor().run_until_parked();
441 let worktree_id = cx.update(|cx| {
442 project.update(cx, |project, cx| {
443 project.worktrees(cx).next().unwrap().read(cx).id()
444 })
445 });
446
447 let active_non_worktree_item_tasks = cx.update(|cx| {
448 get_all_tasks(
449 &project,
450 &TaskContexts {
451 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
452 active_worktree_context: None,
453 other_worktree_contexts: Vec::new(),
454 },
455 cx,
456 )
457 });
458 assert!(
459 active_non_worktree_item_tasks.is_empty(),
460 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
461 );
462
463 let active_worktree_tasks = cx.update(|cx| {
464 get_all_tasks(
465 &project,
466 &TaskContexts {
467 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
468 active_worktree_context: Some((worktree_id, {
469 let mut worktree_context = TaskContext::default();
470 worktree_context
471 .task_variables
472 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
473 worktree_context
474 })),
475 other_worktree_contexts: Vec::new(),
476 },
477 cx,
478 )
479 });
480 assert_eq!(
481 active_worktree_tasks
482 .into_iter()
483 .map(|(source_kind, task)| {
484 let resolved = task.resolved.unwrap();
485 (source_kind, resolved.command)
486 })
487 .collect::<Vec<_>>(),
488 vec![(
489 TaskSourceKind::Worktree {
490 id: worktree_id,
491 directory_in_worktree: PathBuf::from(separator!(".zed")),
492 id_base: if cfg!(windows) {
493 "local worktree tasks from directory \".zed\"".into()
494 } else {
495 "local worktree tasks from directory \".zed\"".into()
496 },
497 },
498 "echo /dir".to_string(),
499 )]
500 );
501}
502
#[gpui::test]
async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    // End-to-end check of language-server lifecycle management: servers start
    // lazily when a matching buffer opens, buffers are configured from server
    // capabilities, edits/saves/renames are routed only to the matching
    // servers, diagnostics are cleared when a buffer changes language, and
    // restarting servers reopens the relevant documents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "test.rs": "const A: i32 = 1;",
            "test2.rs": "",
            "Cargo.toml": "a = 1",
            "package.json": "{\"a\": 1}",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Fake Rust server: completions triggered on "." and "::", save notifications on.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );
    // Fake JSON server: completions triggered on ":", save notifications on.
    let mut fake_json_servers = language_registry.register_fake_lsp(
        "JSON",
        FakeLspAdapter {
            name: "the-json-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer without an associated language server.
    let (toml_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
        })
        .await
        .unwrap();

    // Open a buffer with an associated language server before the language for it has been loaded.
    let (rust_buffer, _handle2) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), None);
    });

    // Now we add the languages to the project, and ensure they get assigned to all
    // the relevant open buffers.
    language_registry.add(json_lang());
    language_registry.add(rust_lang());
    cx.executor().run_until_parked();
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
    });

    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: "const A: i32 = 1;".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is configured based on the language server's capabilities.
    rust_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });
    // The TOML buffer has no server, hence no completion triggers.
    toml_buffer.update(cx, |buffer, _| {
        assert!(buffer.completion_triggers().is_empty());
    });

    // Edit a buffer. The changes are reported to the language server.
    rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            1
        )
    );

    // Open a third buffer with a different associated language server.
    let (json_buffer, _json_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
        })
        .await
        .unwrap();

    // A json language server is started up and is only notified about the json buffer.
    let mut fake_json_server = fake_json_servers.next().await.unwrap();
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
            version: 0,
            text: "{\"a\": 1}".to_string(),
            language_id: "json".to_string(),
        }
    );

    // This buffer is configured based on the second language server's
    // capabilities.
    json_buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[":".to_string()]
        );
    });

    // When opening another buffer whose language server is already running,
    // it is also configured based on the existing language server's capabilities.
    let (rust_buffer2, _handle4) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
        })
        .await
        .unwrap();
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .completion_triggers()
                .into_iter()
                .cloned()
                .collect::<Vec<_>>(),
            &[".".to_string(), "::".to_string()]
        );
    });

    // Changes are reported only to servers matching the buffer's language.
    toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "let x = 1;")], None, cx)
    });
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap(),
            1
        )
    );

    // Save notifications are reported to all servers.
    project
        .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
        .await
        .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidSaveTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/Cargo.toml")).unwrap()
        )
    );

    // Renames are reported only to servers matching the buffer's language.
    fs.rename(
        Path::new(path!("/dir/test2.rs")),
        Path::new(path!("/dir/test3.rs")),
        Default::default(),
    )
    .await
    .unwrap();
    // The rename surfaces to the server as a close of the old path…
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test2.rs")).unwrap()),
    );
    // …followed by an open of the new path.
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        },
    );

    // Seed a diagnostic so we can verify it is cleared on language change below.
    rust_buffer2.update(cx, |buffer, cx| {
        buffer.update_diagnostics(
            LanguageServerId(0),
            DiagnosticSet::from_sorted_entries(
                vec![DiagnosticEntry {
                    diagnostic: Default::default(),
                    range: Anchor::MIN..Anchor::MAX,
                }],
                &buffer.snapshot(),
            ),
            cx,
        );
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            1
        );
    });

    // When the rename changes the extension of the file, the buffer gets closed on the old
    // language server and gets opened on the new one.
    fs.rename(
        Path::new(path!("/dir/test3.rs")),
        Path::new(path!("/dir/test3.json")),
        Default::default(),
    )
    .await
    .unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentIdentifier::new(lsp::Url::from_file_path(path!("/dir/test3.rs")).unwrap(),),
    );
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            version: 0,
            text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
            language_id: "json".to_string(),
        },
    );

    // We clear the diagnostics, since the language has changed.
    rust_buffer2.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
                .count(),
            0
        );
    });

    // The renamed file's version resets after changing language server.
    rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidChangeTextDocument>()
            .await
            .text_document,
        lsp::VersionedTextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
            1
        )
    );

    // Restart language servers
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(
            vec![rust_buffer.clone(), json_buffer.clone()],
            cx,
        );
    });

    // Both old servers receive a shutdown request before the replacements start.
    let mut rust_shutdown_requests = fake_rust_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    let mut json_shutdown_requests = fake_json_server
        .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
    futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());

    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    let mut fake_json_server = fake_json_servers.next().await.unwrap();

    // Ensure rust document is reopened in new rust language server
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Url::from_file_path(path!("/dir/test.rs")).unwrap(),
            version: 0,
            text: rust_buffer.update(cx, |buffer, _| buffer.text()),
            language_id: "rust".to_string(),
        }
    );

    // Ensure json documents are reopened in new json language server
    assert_set_eq!(
        [
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
            fake_json_server
                .receive_notification::<lsp::notification::DidOpenTextDocument>()
                .await
                .text_document,
        ],
        [
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
                version: 0,
                text: json_buffer.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            },
            lsp::TextDocumentItem {
                uri: lsp::Url::from_file_path(path!("/dir/test3.json")).unwrap(),
                version: 0,
                text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
                language_id: "json".to_string(),
            }
        ]
    );

    // Close notifications are reported only to servers matching the buffer's language.
    cx.update(|_| drop(_json_handle));
    let close_message = lsp::DidCloseTextDocumentParams {
        text_document: lsp::TextDocumentIdentifier::new(
            lsp::Url::from_file_path(path!("/dir/package.json")).unwrap(),
        ),
    };
    assert_eq!(
        fake_json_server
            .receive_notification::<lsp::notification::DidCloseTextDocument>()
            .await,
        close_message,
    );
}
904
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies workspace/didChangeWatchedFiles support: registering watchers
    // makes the worktree load matching ignored paths, and subsequent FS
    // mutations are forwarded to the server only when they match a watcher glob.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // Keep track of the FS events reported to the language server.
    let fake_server = fake_servers.next().await.unwrap();
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register three watchers: an exact path, a glob over src, and a glob
    // over an ignored subdirectory (target/y).
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .unwrap();
    // Record every change notification, sorted by URI for stable assertions.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                (Path::new(""), false),
                (Path::new(".gitignore"), false),
                (Path::new("src"), false),
                (Path::new("src/a.rs"), false),
                (Path::new("src/b.rs"), false),
                (Path::new("target"), true),
                (Path::new("target/x"), true),
                (Path::new("target/y"), true),
                (Path::new("target/y/out"), true),
                (Path::new("target/y/out/y.rs"), true),
                (Path::new("target/z"), true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Url::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
        ]
    );
}
1104
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published for two separate single-file
    // worktrees are routed to the correct buffer, each with its own severity.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open each file as its own (single-file) worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Publish one diagnostic per file: an ERROR for a.rs, a WARNING for b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Url::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer shows only its own diagnostic, spanning the variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1206
1207#[gpui::test]
1208async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1209 init_test(cx);
1210
1211 let fs = FakeFs::new(cx.executor());
1212 fs.insert_tree(
1213 path!("/root"),
1214 json!({
1215 "dir": {
1216 ".git": {
1217 "HEAD": "ref: refs/heads/main",
1218 },
1219 ".gitignore": "b.rs",
1220 "a.rs": "let a = 1;",
1221 "b.rs": "let b = 2;",
1222 },
1223 "other.rs": "let b = c;"
1224 }),
1225 )
1226 .await;
1227
1228 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1229 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1230 let (worktree, _) = project
1231 .update(cx, |project, cx| {
1232 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1233 })
1234 .await
1235 .unwrap();
1236 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1237
1238 let (worktree, _) = project
1239 .update(cx, |project, cx| {
1240 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1241 })
1242 .await
1243 .unwrap();
1244 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1245
1246 let server_id = LanguageServerId(0);
1247 lsp_store.update(cx, |lsp_store, cx| {
1248 lsp_store
1249 .update_diagnostics(
1250 server_id,
1251 lsp::PublishDiagnosticsParams {
1252 uri: Url::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1253 version: None,
1254 diagnostics: vec![lsp::Diagnostic {
1255 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1256 severity: Some(lsp::DiagnosticSeverity::ERROR),
1257 message: "unused variable 'b'".to_string(),
1258 ..Default::default()
1259 }],
1260 },
1261 &[],
1262 cx,
1263 )
1264 .unwrap();
1265 lsp_store
1266 .update_diagnostics(
1267 server_id,
1268 lsp::PublishDiagnosticsParams {
1269 uri: Url::from_file_path(path!("/root/other.rs")).unwrap(),
1270 version: None,
1271 diagnostics: vec![lsp::Diagnostic {
1272 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1273 severity: Some(lsp::DiagnosticSeverity::ERROR),
1274 message: "unknown variable 'c'".to_string(),
1275 ..Default::default()
1276 }],
1277 },
1278 &[],
1279 cx,
1280 )
1281 .unwrap();
1282 });
1283
1284 let main_ignored_buffer = project
1285 .update(cx, |project, cx| {
1286 project.open_buffer((main_worktree_id, "b.rs"), cx)
1287 })
1288 .await
1289 .unwrap();
1290 main_ignored_buffer.update(cx, |buffer, _| {
1291 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1292 assert_eq!(
1293 chunks
1294 .iter()
1295 .map(|(s, d)| (s.as_str(), *d))
1296 .collect::<Vec<_>>(),
1297 &[
1298 ("let ", None),
1299 ("b", Some(DiagnosticSeverity::ERROR)),
1300 (" = 2;", None),
1301 ],
1302 "Gigitnored buffers should still get in-buffer diagnostics",
1303 );
1304 });
1305 let other_buffer = project
1306 .update(cx, |project, cx| {
1307 project.open_buffer((other_worktree_id, ""), cx)
1308 })
1309 .await
1310 .unwrap();
1311 other_buffer.update(cx, |buffer, _| {
1312 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1313 assert_eq!(
1314 chunks
1315 .iter()
1316 .map(|(s, d)| (s.as_str(), *d))
1317 .collect::<Vec<_>>(),
1318 &[
1319 ("let b = ", None),
1320 ("c", Some(DiagnosticSeverity::ERROR)),
1321 (";", None),
1322 ],
1323 "Buffers from hidden projects should still get in-buffer diagnostics"
1324 );
1325 });
1326
1327 project.update(cx, |project, cx| {
1328 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1329 assert_eq!(
1330 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1331 vec![(
1332 ProjectPath {
1333 worktree_id: main_worktree_id,
1334 path: Arc::from(Path::new("b.rs")),
1335 },
1336 server_id,
1337 DiagnosticSummary {
1338 error_count: 1,
1339 warning_count: 0,
1340 }
1341 )]
1342 );
1343 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1344 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1345 });
1346}
1347
// Verifies the disk-based diagnostics lifecycle: starting progress under the
// adapter's configured progress token emits DiskBasedDiagnosticsStarted,
// publishing diagnostics emits DiagnosticsUpdated and populates the buffer,
// ending the progress emits DiskBasedDiagnosticsFinished, and republishing
// identical empty diagnostics produces no redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events; everything below asserts on exact ordering.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token (here suffixed with "/0")
    // marks the server as running disk-based diagnostics. A RefreshInlayHints
    // event is observed first.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic for a file that is not yet open in a buffer.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the diagnosed file afterwards surfaces the published entry.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            path: (worktree_id, Path::new("a.rs")).into()
        }
    );

    // The second, identical empty publish must be a no-op: after settling,
    // no further event is queued.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1483
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress does not leave the project stuck in a "diagnosing"
// state: the replacement server (id 1) runs its own start/finish cycle and
// the abandoned progress of the old server (id 0) is ignored.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1570
1571#[gpui::test]
1572async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1573 init_test(cx);
1574
1575 let fs = FakeFs::new(cx.executor());
1576 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1577
1578 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1579
1580 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1581 language_registry.add(rust_lang());
1582 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1583
1584 let (buffer, _) = project
1585 .update(cx, |project, cx| {
1586 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1587 })
1588 .await
1589 .unwrap();
1590
1591 // Publish diagnostics
1592 let fake_server = fake_servers.next().await.unwrap();
1593 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1594 uri: Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1595 version: None,
1596 diagnostics: vec![lsp::Diagnostic {
1597 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
1598 severity: Some(lsp::DiagnosticSeverity::ERROR),
1599 message: "the message".to_string(),
1600 ..Default::default()
1601 }],
1602 });
1603
1604 cx.executor().run_until_parked();
1605 buffer.update(cx, |buffer, _| {
1606 assert_eq!(
1607 buffer
1608 .snapshot()
1609 .diagnostics_in_range::<_, usize>(0..1, false)
1610 .map(|entry| entry.diagnostic.message.clone())
1611 .collect::<Vec<_>>(),
1612 ["the message".to_string()]
1613 );
1614 });
1615 project.update(cx, |project, cx| {
1616 assert_eq!(
1617 project.diagnostic_summary(false, cx),
1618 DiagnosticSummary {
1619 error_count: 1,
1620 warning_count: 0,
1621 }
1622 );
1623 });
1624
1625 project.update(cx, |project, cx| {
1626 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1627 });
1628
1629 // The diagnostics are cleared.
1630 cx.executor().run_until_parked();
1631 buffer.update(cx, |buffer, _| {
1632 assert_eq!(
1633 buffer
1634 .snapshot()
1635 .diagnostics_in_range::<_, usize>(0..1, false)
1636 .map(|entry| entry.diagnostic.message.clone())
1637 .collect::<Vec<_>>(),
1638 Vec::<String>::new(),
1639 );
1640 });
1641 project.update(cx, |project, cx| {
1642 assert_eq!(
1643 project.diagnostic_summary(false, cx),
1644 DiagnosticSummary {
1645 error_count: 0,
1646 warning_count: 0,
1647 }
1648 );
1649 });
1650}
1651
1652#[gpui::test]
1653async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
1654 init_test(cx);
1655
1656 let fs = FakeFs::new(cx.executor());
1657 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1658
1659 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1660 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1661
1662 language_registry.add(rust_lang());
1663 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
1664
1665 let (buffer, _handle) = project
1666 .update(cx, |project, cx| {
1667 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1668 })
1669 .await
1670 .unwrap();
1671
1672 // Before restarting the server, report diagnostics with an unknown buffer version.
1673 let fake_server = fake_servers.next().await.unwrap();
1674 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
1675 uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
1676 version: Some(10000),
1677 diagnostics: Vec::new(),
1678 });
1679 cx.executor().run_until_parked();
1680 project.update(cx, |project, cx| {
1681 project.restart_language_servers_for_buffers(vec![buffer.clone()], cx);
1682 });
1683
1684 let mut fake_server = fake_servers.next().await.unwrap();
1685 let notification = fake_server
1686 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1687 .await
1688 .text_document;
1689 assert_eq!(notification.version, 0);
1690}
1691
1692#[gpui::test]
1693async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
1694 init_test(cx);
1695
1696 let progress_token = "the-progress-token";
1697
1698 let fs = FakeFs::new(cx.executor());
1699 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
1700
1701 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1702
1703 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1704 language_registry.add(rust_lang());
1705 let mut fake_servers = language_registry.register_fake_lsp(
1706 "Rust",
1707 FakeLspAdapter {
1708 name: "the-language-server",
1709 disk_based_diagnostics_sources: vec!["disk".into()],
1710 disk_based_diagnostics_progress_token: Some(progress_token.into()),
1711 ..Default::default()
1712 },
1713 );
1714
1715 let (buffer, _handle) = project
1716 .update(cx, |project, cx| {
1717 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
1718 })
1719 .await
1720 .unwrap();
1721
1722 // Simulate diagnostics starting to update.
1723 let mut fake_server = fake_servers.next().await.unwrap();
1724 fake_server
1725 .start_progress_with(
1726 "another-token",
1727 lsp::WorkDoneProgressBegin {
1728 cancellable: Some(false),
1729 ..Default::default()
1730 },
1731 )
1732 .await;
1733 fake_server
1734 .start_progress_with(
1735 progress_token,
1736 lsp::WorkDoneProgressBegin {
1737 cancellable: Some(true),
1738 ..Default::default()
1739 },
1740 )
1741 .await;
1742 cx.executor().run_until_parked();
1743
1744 project.update(cx, |project, cx| {
1745 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
1746 });
1747
1748 let cancel_notification = fake_server
1749 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
1750 .await;
1751 assert_eq!(
1752 cancel_notification.token,
1753 NumberOrString::String(progress_token.into())
1754 );
1755}
1756
// Verifies that toggling `enable_language_server` in per-language settings
// stops exactly that language's server (it receives an Exit notification) and
// that re-enabling spawns a fresh server which re-opens the relevant buffer.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language, so we can observe each independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The second Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
1874
// Verifies that published diagnostics are transformed through subsequent
// buffer edits: ranges reported against an older document version are mapped
// forward onto the current text, overlapping diagnostics chunk correctly, and
// a publish tagged with an older version than the latest change is still
// translated through the edits made since. Group ids are observed to keep
// incrementing across successive publishes.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The edit prepended two newlines, so row 1 -> row 3, row 2 -> row 4.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        ..Default::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        ..Default::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Note: group ids continued incrementing (3, 4) across publishes.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
        // The error chunk takes precedence where the two diagnostics overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        ..Default::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        ..Default::default()
                    },
                }
            ]
        );
    });
}
2160
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width (empty) diagnostic ranges are widened when the
    // buffer is chunked for display: forward onto the following character, or
    // backward onto the preceding one when the range sits at end-of-line.
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Publish two diagnostics whose start and end positions coincide:
    // one in the middle of line 0, one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                ..Default::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                ..Default::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2233
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    // Two distinct language servers each report one error for the same file
    // and the same range; the project-wide summary must count both entries
    // rather than deduplicating across servers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Error reported by server 0.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Error reported by server 1, covering the same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        ..Default::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // One error per server, no warnings.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2290
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Edits computed by a language server against an OLDER document version
    // must be rebased onto the buffer's current contents before being applied,
    // so that user edits made in the meantime are preserved.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP edits
    // below are expressed against this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // All ranges below are in coordinates of the ORIGINAL (versioned) text.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // The rebased edits must land in the right places despite the interleaved
    // user edits, leaving both sets of changes intact.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2445
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // A server may express a tiny change as a huge rewrite (delete-and-reinsert
    // most of the file). `edits_from_lsp` must minimize that into the smallest
    // equivalent set of edits so anchors and cursors are disturbed as little
    // as possible.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff collapses to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2556
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Malformed server edits — unordered, with inverted (end < start) ranges
    // and positions past the end of the file — must be tolerated: normalized,
    // clipped to the buffer, and still minimized to precise edits.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends far past the end of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimized result as the well-formed variant of this diff.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2663
2664fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
2665 buffer: &Buffer,
2666 range: Range<T>,
2667) -> Vec<(String, Option<DiagnosticSeverity>)> {
2668 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
2669 for chunk in buffer.snapshot().chunks(range, true) {
2670 if chunks.last().map_or(false, |prev_chunk| {
2671 prev_chunk.1 == chunk.diagnostic_severity
2672 }) {
2673 chunks.last_mut().unwrap().0.push_str(chunk.text);
2674 } else {
2675 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
2676 }
2677 }
2678 chunks
2679}
2680
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Go-to-definition that resolves to a file OUTSIDE the project's visible
    // worktree: the target is loaded into an invisible worktree, which is
    // released once the last reference to the definition is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs sits outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers the definition request with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Url::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definition(&buffer, 22, cx))
        .await
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // While the definition is alive, a.rs is held in a non-visible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
2778
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // A completion item's explicit `text_edit` must win over both its
    // `insert_text` and its `label` when building the applied completion.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the request first; the handler below serves it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Both the replacement text and the replaced range come from text_edit.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
2856
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When completion items carry no per-item `text_edit`, the list-level
    // default `edit_range` supplies the replaced range, and the replacement
    // text falls back to `insert_text`, then to `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // insert_text wins over label; range comes from the list default.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions.await.unwrap().unwrap();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With nothing else available, the label itself is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].old_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
2982
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // With neither a per-item `text_edit` nor a list-level default
    // `edit_range`, the replaced range must be inferred from the buffer
    // contents around the completion position.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the partial word "fqn" before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is inside the string literal, just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", the partial word before the cursor.
    assert_eq!(
        completions[0].old_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3078
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Carriage returns in a server's `insert_text` (both bare "\r" and
    // "\r\n") must be normalized to "\n" in the resulting completion text.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixes a bare "\r" and a "\r\n" line ending on purpose.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions.await.unwrap().unwrap();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3141
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end flow for a code action whose effect comes from executing a
    // server command (which in turn sends a `workspace/applyEdit` request)
    // rather than from edits embedded in the action itself.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Url::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3282
3283#[gpui::test(iterations = 10)]
3284async fn test_save_file(cx: &mut gpui::TestAppContext) {
3285 init_test(cx);
3286
3287 let fs = FakeFs::new(cx.executor());
3288 fs.insert_tree(
3289 path!("/dir"),
3290 json!({
3291 "file1": "the old contents",
3292 }),
3293 )
3294 .await;
3295
3296 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3297 let buffer = project
3298 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3299 .await
3300 .unwrap();
3301 buffer.update(cx, |buffer, cx| {
3302 assert_eq!(buffer.text(), "the old contents");
3303 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3304 });
3305
3306 project
3307 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3308 .await
3309 .unwrap();
3310
3311 let new_text = fs
3312 .load(Path::new(path!("/dir/file1")))
3313 .await
3314 .unwrap()
3315 .replace("\r\n", "\n");
3316 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3317}
3318
3319#[gpui::test(iterations = 30)]
3320async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3321 init_test(cx);
3322
3323 let fs = FakeFs::new(cx.executor().clone());
3324 fs.insert_tree(
3325 path!("/dir"),
3326 json!({
3327 "file1": "the original contents",
3328 }),
3329 )
3330 .await;
3331
3332 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3333 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3334 let buffer = project
3335 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3336 .await
3337 .unwrap();
3338
3339 // Simulate buffer diffs being slow, so that they don't complete before
3340 // the next file change occurs.
3341 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3342
3343 // Change the buffer's file on disk, and then wait for the file change
3344 // to be detected by the worktree, so that the buffer starts reloading.
3345 fs.save(
3346 path!("/dir/file1").as_ref(),
3347 &"the first contents".into(),
3348 Default::default(),
3349 )
3350 .await
3351 .unwrap();
3352 worktree.next_event(cx).await;
3353
3354 // Change the buffer's file again. Depending on the random seed, the
3355 // previous file change may still be in progress.
3356 fs.save(
3357 path!("/dir/file1").as_ref(),
3358 &"the second contents".into(),
3359 Default::default(),
3360 )
3361 .await
3362 .unwrap();
3363 worktree.next_event(cx).await;
3364
3365 cx.executor().run_until_parked();
3366 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3367 buffer.read_with(cx, |buffer, _| {
3368 assert_eq!(buffer.text(), on_disk_text);
3369 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3370 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3371 });
3372}
3373
3374#[gpui::test(iterations = 30)]
3375async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3376 init_test(cx);
3377
3378 let fs = FakeFs::new(cx.executor().clone());
3379 fs.insert_tree(
3380 path!("/dir"),
3381 json!({
3382 "file1": "the original contents",
3383 }),
3384 )
3385 .await;
3386
3387 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3388 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3389 let buffer = project
3390 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3391 .await
3392 .unwrap();
3393
3394 // Simulate buffer diffs being slow, so that they don't complete before
3395 // the next file change occurs.
3396 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
3397
3398 // Change the buffer's file on disk, and then wait for the file change
3399 // to be detected by the worktree, so that the buffer starts reloading.
3400 fs.save(
3401 path!("/dir/file1").as_ref(),
3402 &"the first contents".into(),
3403 Default::default(),
3404 )
3405 .await
3406 .unwrap();
3407 worktree.next_event(cx).await;
3408
3409 cx.executor()
3410 .spawn(cx.executor().simulate_random_delay())
3411 .await;
3412
3413 // Perform a noop edit, causing the buffer's version to increase.
3414 buffer.update(cx, |buffer, cx| {
3415 buffer.edit([(0..0, " ")], None, cx);
3416 buffer.undo(cx);
3417 });
3418
3419 cx.executor().run_until_parked();
3420 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3421 buffer.read_with(cx, |buffer, _| {
3422 let buffer_text = buffer.text();
3423 if buffer_text == on_disk_text {
3424 assert!(
3425 !buffer.is_dirty() && !buffer.has_conflict(),
3426 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
3427 );
3428 }
3429 // If the file change occurred while the buffer was processing the first
3430 // change, the buffer will be in a conflicting state.
3431 else {
3432 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3433 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
3434 }
3435 });
3436}
3437
3438#[gpui::test]
3439async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
3440 init_test(cx);
3441
3442 let fs = FakeFs::new(cx.executor());
3443 fs.insert_tree(
3444 path!("/dir"),
3445 json!({
3446 "file1": "the old contents",
3447 }),
3448 )
3449 .await;
3450
3451 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
3452 let buffer = project
3453 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3454 .await
3455 .unwrap();
3456 buffer.update(cx, |buffer, cx| {
3457 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3458 });
3459
3460 project
3461 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3462 .await
3463 .unwrap();
3464
3465 let new_text = fs
3466 .load(Path::new(path!("/dir/file1")))
3467 .await
3468 .unwrap()
3469 .replace("\r\n", "\n");
3470 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3471}
3472
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer under a new path should write the file,
    // clear the dirty state, and re-detect the buffer's language from the
    // new file name.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // Create an in-memory buffer with no backing file; it starts as Plain Text.
    let buffer = project.update(cx, |project, cx| project.create_local_buffer("", None, cx));
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    // Save the buffer under a `.rs` path inside the project's worktree.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: Arc::from(Path::new("file1.rs")),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // Language is re-detected from the new `.rs` extension.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the newly-saved path must return the same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
3524
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Exercises worktree rescanning against a real filesystem: after files and
    // directories are renamed/deleted on disk, entry ids must stay stable,
    // open buffers must follow their files, and a remote replica of the
    // worktree (fed by observed updates) must converge to the same paths.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    // Real FS (not FakeFs) so actual file-watcher events drive the rescan.
    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: resolve a worktree-relative path to its stable entry id.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can later be
    // replayed into the remote replica.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the post-rename directory layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including the parent-directory rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file's buffer
    // still reports its old path but with a Deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                separator!("a/file1"),
                separator!("a/file2.new"),
                "b",
                "d",
                separator!("d/file3"),
                separator!("d/file4"),
            ]
        );
    });
}
3690
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Renaming a directory must preserve the entry ids of the directory and
    // the files inside it, and an open buffer must stay clean and keep
    // tracking its file across the rename.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: resolve a worktree-relative path to its stable entry id,
    // panicking if the path has no entry.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the parent directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, Path::new("b"), cx)
        })
        .unwrap()
        .await
        .to_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are stable across the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
3742
3743#[gpui::test]
3744async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
3745 init_test(cx);
3746
3747 let fs = FakeFs::new(cx.executor());
3748 fs.insert_tree(
3749 "/dir",
3750 json!({
3751 "a.txt": "a-contents",
3752 "b.txt": "b-contents",
3753 }),
3754 )
3755 .await;
3756
3757 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
3758
3759 // Spawn multiple tasks to open paths, repeating some paths.
3760 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
3761 (
3762 p.open_local_buffer("/dir/a.txt", cx),
3763 p.open_local_buffer("/dir/b.txt", cx),
3764 p.open_local_buffer("/dir/a.txt", cx),
3765 )
3766 });
3767
3768 let buffer_a_1 = buffer_a_1.await.unwrap();
3769 let buffer_a_2 = buffer_a_2.await.unwrap();
3770 let buffer_b = buffer_b.await.unwrap();
3771 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
3772 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
3773
3774 // There is only one buffer per path.
3775 let buffer_a_id = buffer_a_1.entity_id();
3776 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
3777
3778 // Open the same path again while it is still open.
3779 drop(buffer_a_1);
3780 let buffer_a_3 = project
3781 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
3782 .await
3783 .unwrap();
3784
3785 // There's still only one buffer per path.
3786 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
3787}
3788
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Verifies the buffer's dirty/conflict flags and the exact sequence of
    // Edited / DirtyChanged / Saved / FileHandleChanged events across edits,
    // saves, undo-equivalent edits, and on-disk deletion.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-Operation event emitted by buffer1.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first post-save edit emits DirtyChanged; the second
        // edit emits Edited alone because the buffer is already dirty.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, the buffer is considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(buffer.is_dirty()));
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::DirtyChanged,
            language::BufferEvent::FileHandleChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| events.lock().push(event.clone())
        })
        .detach();
    });

    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
3939
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // When a clean buffer's file changes on disk, the buffer reloads by
    // diffing old vs. new contents, so anchors survive the reload at their
    // logical positions. When a dirty buffer's file changes, the buffer keeps
    // its contents and reports a conflict instead.
    init_test(cx);

    // `ˇ` markers record offsets where anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // The new contents' markers give the expected post-reload anchor offsets.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Anchors moved with the diff rather than being reset.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4022
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    // Buffers normalize text to `\n` internally while remembering the file's
    // original line ending; saving re-applies that ending, and on-disk ending
    // changes are picked up on reload.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    // The CRLF file's text is normalized, but the Windows ending is recorded.
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
4084
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Diagnostics whose `related_information` point at each other should be
    // grouped: each group has one primary entry plus supplementary hints, all
    // sharing a `group_id`, and `diagnostic_group` returns the whole group.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two logical groups:
    // "error 1" (warning + its hint) and "error 2" (error + two hints).
    let buffer_uri = Url::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary of group "error 1", with a pointer to its hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint belonging to "error 1", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary of group "error 2", pointing at its two hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint of "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint of "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(LanguageServerId(0), message, &[], cx)
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries come back sorted by range, tagged with group ids and
    // primary/supplementary flags.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" cluster (primary + both hints).
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    ..Default::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" cluster (primary + its hint).
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    ..Default::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    ..Default::default()
                }
            },
        ]
    );
}
4327
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // When a file is renamed via the project, a language server that registered
    // file-operation capabilities must receive `workspace/willRenameFiles`
    // (whose returned WorkspaceEdit the project applies) followed by the
    // `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server opts into rename notifications for `.rs` files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server starts.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename: one.rs -> three.rs.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will answer willRenameFiles with; it targets the
    // *other* file (two.rs) to show the edit is applied as returned.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Url::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once by the willRenameFiles handler, proving it ran with the
    // expected old/new URIs before the rename completed.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename, the server must receive didRenameFiles with the same
    // URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
4456
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end test of the LSP symbol-rename flow: `prepare_rename` resolves
    // the renameable range around the cursor, then `perform_rename` applies the
    // multi-file `WorkspaceEdit` returned by the fake language server.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server advertises rename support with `prepare_provider`, so the
    // client is expected to issue `textDocument/prepareRename` first.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Offset 7 is inside the identifier `ONE` in "const ONE: usize = 1;".
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    // Answer prepare-rename with the range covering `ONE` (columns 6..9).
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    // The fake server renames `ONE` in its declaration and at both of its
    // reference sites in two.rs, exercising a multi-buffer workspace edit.
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction should touch both buffers, and each should reflect the
    // applied edits.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
4596
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies basic project-wide text search, and that a second search picks
    // up unsaved in-memory buffer edits rather than only on-disk contents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // On disk, "TWO" occurs in two.rs (its declaration) and three.rs (a use).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now references two::TWO
    // twice; the next search should see these occurrences.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("dir/two.rs").to_string(), vec![6..9]),
            (separator!("dir/three.rs").to_string(), vec![37..40]),
            (separator!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
4671
4672#[gpui::test]
4673async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
4674 init_test(cx);
4675
4676 let search_query = "file";
4677
4678 let fs = FakeFs::new(cx.executor());
4679 fs.insert_tree(
4680 path!("/dir"),
4681 json!({
4682 "one.rs": r#"// Rust file one"#,
4683 "one.ts": r#"// TypeScript file one"#,
4684 "two.rs": r#"// Rust file two"#,
4685 "two.ts": r#"// TypeScript file two"#,
4686 }),
4687 )
4688 .await;
4689 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4690
4691 assert!(
4692 search(
4693 &project,
4694 SearchQuery::text(
4695 search_query,
4696 false,
4697 true,
4698 false,
4699 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4700 Default::default(),
4701 None
4702 )
4703 .unwrap(),
4704 cx
4705 )
4706 .await
4707 .unwrap()
4708 .is_empty(),
4709 "If no inclusions match, no files should be returned"
4710 );
4711
4712 assert_eq!(
4713 search(
4714 &project,
4715 SearchQuery::text(
4716 search_query,
4717 false,
4718 true,
4719 false,
4720 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4721 Default::default(),
4722 None
4723 )
4724 .unwrap(),
4725 cx
4726 )
4727 .await
4728 .unwrap(),
4729 HashMap::from_iter([
4730 (separator!("dir/one.rs").to_string(), vec![8..12]),
4731 (separator!("dir/two.rs").to_string(), vec![8..12]),
4732 ]),
4733 "Rust only search should give only Rust files"
4734 );
4735
4736 assert_eq!(
4737 search(
4738 &project,
4739 SearchQuery::text(
4740 search_query,
4741 false,
4742 true,
4743 false,
4744
4745 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4746
4747 Default::default(),
4748 None,
4749 ).unwrap(),
4750 cx
4751 )
4752 .await
4753 .unwrap(),
4754 HashMap::from_iter([
4755 (separator!("dir/one.ts").to_string(), vec![14..18]),
4756 (separator!("dir/two.ts").to_string(), vec![14..18]),
4757 ]),
4758 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
4759 );
4760
4761 assert_eq!(
4762 search(
4763 &project,
4764 SearchQuery::text(
4765 search_query,
4766 false,
4767 true,
4768 false,
4769
4770 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4771
4772 Default::default(),
4773 None,
4774 ).unwrap(),
4775 cx
4776 )
4777 .await
4778 .unwrap(),
4779 HashMap::from_iter([
4780 (separator!("dir/two.ts").to_string(), vec![14..18]),
4781 (separator!("dir/one.rs").to_string(), vec![8..12]),
4782 (separator!("dir/one.ts").to_string(), vec![14..18]),
4783 (separator!("dir/two.rs").to_string(), vec![8..12]),
4784 ]),
4785 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
4786 );
4787}
4788
4789#[gpui::test]
4790async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
4791 init_test(cx);
4792
4793 let search_query = "file";
4794
4795 let fs = FakeFs::new(cx.executor());
4796 fs.insert_tree(
4797 path!("/dir"),
4798 json!({
4799 "one.rs": r#"// Rust file one"#,
4800 "one.ts": r#"// TypeScript file one"#,
4801 "two.rs": r#"// Rust file two"#,
4802 "two.ts": r#"// TypeScript file two"#,
4803 }),
4804 )
4805 .await;
4806 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4807
4808 assert_eq!(
4809 search(
4810 &project,
4811 SearchQuery::text(
4812 search_query,
4813 false,
4814 true,
4815 false,
4816 Default::default(),
4817 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4818 None,
4819 )
4820 .unwrap(),
4821 cx
4822 )
4823 .await
4824 .unwrap(),
4825 HashMap::from_iter([
4826 (separator!("dir/one.rs").to_string(), vec![8..12]),
4827 (separator!("dir/one.ts").to_string(), vec![14..18]),
4828 (separator!("dir/two.rs").to_string(), vec![8..12]),
4829 (separator!("dir/two.ts").to_string(), vec![14..18]),
4830 ]),
4831 "If no exclusions match, all files should be returned"
4832 );
4833
4834 assert_eq!(
4835 search(
4836 &project,
4837 SearchQuery::text(
4838 search_query,
4839 false,
4840 true,
4841 false,
4842 Default::default(),
4843 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
4844 None,
4845 )
4846 .unwrap(),
4847 cx
4848 )
4849 .await
4850 .unwrap(),
4851 HashMap::from_iter([
4852 (separator!("dir/one.ts").to_string(), vec![14..18]),
4853 (separator!("dir/two.ts").to_string(), vec![14..18]),
4854 ]),
4855 "Rust exclusion search should give only TypeScript files"
4856 );
4857
4858 assert_eq!(
4859 search(
4860 &project,
4861 SearchQuery::text(
4862 search_query,
4863 false,
4864 true,
4865 false,
4866 Default::default(),
4867 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4868 None,
4869 ).unwrap(),
4870 cx
4871 )
4872 .await
4873 .unwrap(),
4874 HashMap::from_iter([
4875 (separator!("dir/one.rs").to_string(), vec![8..12]),
4876 (separator!("dir/two.rs").to_string(), vec![8..12]),
4877 ]),
4878 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
4879 );
4880
4881 assert!(
4882 search(
4883 &project,
4884 SearchQuery::text(
4885 search_query,
4886 false,
4887 true,
4888 false,
4889 Default::default(),
4890
4891 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4892 None,
4893
4894 ).unwrap(),
4895 cx
4896 )
4897 .await
4898 .unwrap().is_empty(),
4899 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
4900 );
4901}
4902
4903#[gpui::test]
4904async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
4905 init_test(cx);
4906
4907 let search_query = "file";
4908
4909 let fs = FakeFs::new(cx.executor());
4910 fs.insert_tree(
4911 path!("/dir"),
4912 json!({
4913 "one.rs": r#"// Rust file one"#,
4914 "one.ts": r#"// TypeScript file one"#,
4915 "two.rs": r#"// Rust file two"#,
4916 "two.ts": r#"// TypeScript file two"#,
4917 }),
4918 )
4919 .await;
4920 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4921
4922 assert!(
4923 search(
4924 &project,
4925 SearchQuery::text(
4926 search_query,
4927 false,
4928 true,
4929 false,
4930 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4931 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
4932 None,
4933 )
4934 .unwrap(),
4935 cx
4936 )
4937 .await
4938 .unwrap()
4939 .is_empty(),
4940 "If both no exclusions and inclusions match, exclusions should win and return nothing"
4941 );
4942
4943 assert!(
4944 search(
4945 &project,
4946 SearchQuery::text(
4947 search_query,
4948 false,
4949 true,
4950 false,
4951 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4952 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
4953 None,
4954 ).unwrap(),
4955 cx
4956 )
4957 .await
4958 .unwrap()
4959 .is_empty(),
4960 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
4961 );
4962
4963 assert!(
4964 search(
4965 &project,
4966 SearchQuery::text(
4967 search_query,
4968 false,
4969 true,
4970 false,
4971 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4972 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4973 None,
4974 )
4975 .unwrap(),
4976 cx
4977 )
4978 .await
4979 .unwrap()
4980 .is_empty(),
4981 "Non-matching inclusions and exclusions should not change that."
4982 );
4983
4984 assert_eq!(
4985 search(
4986 &project,
4987 SearchQuery::text(
4988 search_query,
4989 false,
4990 true,
4991 false,
4992 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
4993 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
4994 None,
4995 )
4996 .unwrap(),
4997 cx
4998 )
4999 .await
5000 .unwrap(),
5001 HashMap::from_iter([
5002 (separator!("dir/one.ts").to_string(), vec![14..18]),
5003 (separator!("dir/two.ts").to_string(), vec![14..18]),
5004 ]),
5005 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5006 );
5007}
5008
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that inclusion globs are matched against worktree-relative
    // paths: a glob prefixed with a worktree name scopes results to that
    // worktree, while an unprefixed glob matches across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    // One project spanning two worktrees.
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(separator!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
                Default::default(),
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (separator!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (separator!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
5103
5104#[gpui::test]
5105async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
5106 init_test(cx);
5107
5108 let fs = FakeFs::new(cx.background_executor.clone());
5109 fs.insert_tree(
5110 path!("/dir"),
5111 json!({
5112 ".git": {},
5113 ".gitignore": "**/target\n/node_modules\n",
5114 "target": {
5115 "index.txt": "index_key:index_value"
5116 },
5117 "node_modules": {
5118 "eslint": {
5119 "index.ts": "const eslint_key = 'eslint value'",
5120 "package.json": r#"{ "some_key": "some value" }"#,
5121 },
5122 "prettier": {
5123 "index.ts": "const prettier_key = 'prettier value'",
5124 "package.json": r#"{ "other_key": "other value" }"#,
5125 },
5126 },
5127 "package.json": r#"{ "main_key": "main value" }"#,
5128 }),
5129 )
5130 .await;
5131 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5132
5133 let query = "key";
5134 assert_eq!(
5135 search(
5136 &project,
5137 SearchQuery::text(
5138 query,
5139 false,
5140 false,
5141 false,
5142 Default::default(),
5143 Default::default(),
5144 None,
5145 )
5146 .unwrap(),
5147 cx
5148 )
5149 .await
5150 .unwrap(),
5151 HashMap::from_iter([(separator!("dir/package.json").to_string(), vec![8..11])]),
5152 "Only one non-ignored file should have the query"
5153 );
5154
5155 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5156 assert_eq!(
5157 search(
5158 &project,
5159 SearchQuery::text(
5160 query,
5161 false,
5162 false,
5163 true,
5164 Default::default(),
5165 Default::default(),
5166 None,
5167 )
5168 .unwrap(),
5169 cx
5170 )
5171 .await
5172 .unwrap(),
5173 HashMap::from_iter([
5174 (separator!("dir/package.json").to_string(), vec![8..11]),
5175 (separator!("dir/target/index.txt").to_string(), vec![6..9]),
5176 (
5177 separator!("dir/node_modules/prettier/package.json").to_string(),
5178 vec![9..12]
5179 ),
5180 (
5181 separator!("dir/node_modules/prettier/index.ts").to_string(),
5182 vec![15..18]
5183 ),
5184 (
5185 separator!("dir/node_modules/eslint/index.ts").to_string(),
5186 vec![13..16]
5187 ),
5188 (
5189 separator!("dir/node_modules/eslint/package.json").to_string(),
5190 vec![8..11]
5191 ),
5192 ]),
5193 "Unrestricted search with ignored directories should find every file with the query"
5194 );
5195
5196 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
5197 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
5198 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5199 assert_eq!(
5200 search(
5201 &project,
5202 SearchQuery::text(
5203 query,
5204 false,
5205 false,
5206 true,
5207 files_to_include,
5208 files_to_exclude,
5209 None,
5210 )
5211 .unwrap(),
5212 cx
5213 )
5214 .await
5215 .unwrap(),
5216 HashMap::from_iter([(
5217 separator!("dir/node_modules/prettier/package.json").to_string(),
5218 vec![9..12]
5219 )]),
5220 "With search including ignored prettier directory and excluding TS files, only one file should be found"
5221 );
5222}
5223
5224#[gpui::test]
5225async fn test_create_entry(cx: &mut gpui::TestAppContext) {
5226 init_test(cx);
5227
5228 let fs = FakeFs::new(cx.executor().clone());
5229 fs.insert_tree(
5230 "/one/two",
5231 json!({
5232 "three": {
5233 "a.txt": "",
5234 "four": {}
5235 },
5236 "c.rs": ""
5237 }),
5238 )
5239 .await;
5240
5241 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
5242 project
5243 .update(cx, |project, cx| {
5244 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5245 project.create_entry((id, "b.."), true, cx)
5246 })
5247 .await
5248 .unwrap()
5249 .to_included()
5250 .unwrap();
5251
5252 // Can't create paths outside the project
5253 let result = project
5254 .update(cx, |project, cx| {
5255 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5256 project.create_entry((id, "../../boop"), true, cx)
5257 })
5258 .await;
5259 assert!(result.is_err());
5260
5261 // Can't create paths with '..'
5262 let result = project
5263 .update(cx, |project, cx| {
5264 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5265 project.create_entry((id, "four/../beep"), true, cx)
5266 })
5267 .await;
5268 assert!(result.is_err());
5269
5270 assert_eq!(
5271 fs.paths(true),
5272 vec![
5273 PathBuf::from(path!("/")),
5274 PathBuf::from(path!("/one")),
5275 PathBuf::from(path!("/one/two")),
5276 PathBuf::from(path!("/one/two/c.rs")),
5277 PathBuf::from(path!("/one/two/three")),
5278 PathBuf::from(path!("/one/two/three/a.txt")),
5279 PathBuf::from(path!("/one/two/three/b..")),
5280 PathBuf::from(path!("/one/two/three/four")),
5281 ]
5282 );
5283
5284 // And we cannot open buffers with '..'
5285 let result = project
5286 .update(cx, |project, cx| {
5287 let id = project.worktrees(cx).next().unwrap().read(cx).id();
5288 project.open_buffer((id, "../c.rs"), cx)
5289 })
5290 .await;
5291 assert!(result.is_err())
5292}
5293
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Exercises hover aggregation across multiple language servers attached to
    // the same buffer: servers that return hover content contribute to the
    // result, a server replying `None` is skipped, and a server without hover
    // capabilities must never be queried at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // The first three servers advertise hover support; the last one does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wire up per-server hover handlers, keyed by server name, so we can later
    // await each request having been triggered.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with real hover content.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // This one is queried but returns no hover.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This one advertises no hover capability, so its handler must
            // never run.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Every server with hover capabilities should have received the request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
5447
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover parts consisting only of whitespace or empty strings
    // are filtered out, producing no hover blocks at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server replies with hover content that is entirely empty/whitespace.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
5520
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that requesting code actions with an explicit kind filter only
    // yields actions of the requested kind, even when the server returns more.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for "source.organizeImports" actions.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kind filter.
    let code_actions = code_actions_task.await.unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
5599
5600#[gpui::test]
5601async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
5602 init_test(cx);
5603
5604 let fs = FakeFs::new(cx.executor());
5605 fs.insert_tree(
5606 path!("/dir"),
5607 json!({
5608 "a.tsx": "a",
5609 }),
5610 )
5611 .await;
5612
5613 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
5614
5615 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5616 language_registry.add(tsx_lang());
5617 let language_server_names = [
5618 "TypeScriptServer",
5619 "TailwindServer",
5620 "ESLintServer",
5621 "NoActionsCapabilitiesServer",
5622 ];
5623
5624 let mut language_server_rxs = [
5625 language_registry.register_fake_lsp(
5626 "tsx",
5627 FakeLspAdapter {
5628 name: language_server_names[0],
5629 capabilities: lsp::ServerCapabilities {
5630 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5631 ..lsp::ServerCapabilities::default()
5632 },
5633 ..FakeLspAdapter::default()
5634 },
5635 ),
5636 language_registry.register_fake_lsp(
5637 "tsx",
5638 FakeLspAdapter {
5639 name: language_server_names[1],
5640 capabilities: lsp::ServerCapabilities {
5641 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5642 ..lsp::ServerCapabilities::default()
5643 },
5644 ..FakeLspAdapter::default()
5645 },
5646 ),
5647 language_registry.register_fake_lsp(
5648 "tsx",
5649 FakeLspAdapter {
5650 name: language_server_names[2],
5651 capabilities: lsp::ServerCapabilities {
5652 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
5653 ..lsp::ServerCapabilities::default()
5654 },
5655 ..FakeLspAdapter::default()
5656 },
5657 ),
5658 language_registry.register_fake_lsp(
5659 "tsx",
5660 FakeLspAdapter {
5661 name: language_server_names[3],
5662 capabilities: lsp::ServerCapabilities {
5663 code_action_provider: None,
5664 ..lsp::ServerCapabilities::default()
5665 },
5666 ..FakeLspAdapter::default()
5667 },
5668 ),
5669 ];
5670
5671 let (buffer, _handle) = project
5672 .update(cx, |p, cx| {
5673 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
5674 })
5675 .await
5676 .unwrap();
5677 cx.executor().run_until_parked();
5678
5679 let mut servers_with_actions_requests = HashMap::default();
5680 for i in 0..language_server_names.len() {
5681 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
5682 panic!(
5683 "Failed to get language server #{i} with name {}",
5684 &language_server_names[i]
5685 )
5686 });
5687 let new_server_name = new_server.server.name();
5688
5689 assert!(
5690 !servers_with_actions_requests.contains_key(&new_server_name),
5691 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
5692 );
5693 match new_server_name.0.as_ref() {
5694 "TailwindServer" | "TypeScriptServer" => {
5695 servers_with_actions_requests.insert(
5696 new_server_name.clone(),
5697 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5698 move |_, _| {
5699 let name = new_server_name.clone();
5700 async move {
5701 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
5702 lsp::CodeAction {
5703 title: format!("{name} code action"),
5704 ..lsp::CodeAction::default()
5705 },
5706 )]))
5707 }
5708 },
5709 ),
5710 );
5711 }
5712 "ESLintServer" => {
5713 servers_with_actions_requests.insert(
5714 new_server_name,
5715 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
5716 |_, _| async move { Ok(None) },
5717 ),
5718 );
5719 }
5720 "NoActionsCapabilitiesServer" => {
5721 let _never_handled = new_server
5722 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
5723 panic!(
5724 "Should not call for code actions server with no corresponding capabilities"
5725 )
5726 });
5727 }
5728 unexpected => panic!("Unexpected server name: {unexpected}"),
5729 }
5730 }
5731
5732 let code_actions_task = project.update(cx, |project, cx| {
5733 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
5734 });
5735
5736 // cx.run_until_parked();
5737 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
5738 |mut code_actions_request| async move {
5739 code_actions_request
5740 .next()
5741 .await
5742 .expect("All code actions requests should have been triggered")
5743 },
5744 ))
5745 .await;
5746 assert_eq!(
5747 vec!["TailwindServer code action", "TypeScriptServer code action"],
5748 code_actions_task
5749 .await
5750 .unwrap()
5751 .into_iter()
5752 .map(|code_action| code_action.lsp_action.title().to_owned())
5753 .sorted()
5754 .collect::<Vec<_>>(),
5755 "Should receive code actions responses from all related servers with hover capabilities"
5756 );
5757}
5758
5759#[gpui::test]
5760async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
5761 init_test(cx);
5762
5763 let fs = FakeFs::new(cx.executor());
5764 fs.insert_tree(
5765 "/dir",
5766 json!({
5767 "a.rs": "let a = 1;",
5768 "b.rs": "let b = 2;",
5769 "c.rs": "let c = 2;",
5770 }),
5771 )
5772 .await;
5773
5774 let project = Project::test(
5775 fs,
5776 [
5777 "/dir/a.rs".as_ref(),
5778 "/dir/b.rs".as_ref(),
5779 "/dir/c.rs".as_ref(),
5780 ],
5781 cx,
5782 )
5783 .await;
5784
5785 // check the initial state and get the worktrees
5786 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
5787 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5788 assert_eq!(worktrees.len(), 3);
5789
5790 let worktree_a = worktrees[0].read(cx);
5791 let worktree_b = worktrees[1].read(cx);
5792 let worktree_c = worktrees[2].read(cx);
5793
5794 // check they start in the right order
5795 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
5796 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
5797 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
5798
5799 (
5800 worktrees[0].clone(),
5801 worktrees[1].clone(),
5802 worktrees[2].clone(),
5803 )
5804 });
5805
5806 // move first worktree to after the second
5807 // [a, b, c] -> [b, a, c]
5808 project
5809 .update(cx, |project, cx| {
5810 let first = worktree_a.read(cx);
5811 let second = worktree_b.read(cx);
5812 project.move_worktree(first.id(), second.id(), cx)
5813 })
5814 .expect("moving first after second");
5815
5816 // check the state after moving
5817 project.update(cx, |project, cx| {
5818 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5819 assert_eq!(worktrees.len(), 3);
5820
5821 let first = worktrees[0].read(cx);
5822 let second = worktrees[1].read(cx);
5823 let third = worktrees[2].read(cx);
5824
5825 // check they are now in the right order
5826 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5827 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
5828 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5829 });
5830
5831 // move the second worktree to before the first
5832 // [b, a, c] -> [a, b, c]
5833 project
5834 .update(cx, |project, cx| {
5835 let second = worktree_a.read(cx);
5836 let first = worktree_b.read(cx);
5837 project.move_worktree(first.id(), second.id(), cx)
5838 })
5839 .expect("moving second before first");
5840
5841 // check the state after moving
5842 project.update(cx, |project, cx| {
5843 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5844 assert_eq!(worktrees.len(), 3);
5845
5846 let first = worktrees[0].read(cx);
5847 let second = worktrees[1].read(cx);
5848 let third = worktrees[2].read(cx);
5849
5850 // check they are now in the right order
5851 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5852 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5853 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5854 });
5855
5856 // move the second worktree to after the third
5857 // [a, b, c] -> [a, c, b]
5858 project
5859 .update(cx, |project, cx| {
5860 let second = worktree_b.read(cx);
5861 let third = worktree_c.read(cx);
5862 project.move_worktree(second.id(), third.id(), cx)
5863 })
5864 .expect("moving second after third");
5865
5866 // check the state after moving
5867 project.update(cx, |project, cx| {
5868 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5869 assert_eq!(worktrees.len(), 3);
5870
5871 let first = worktrees[0].read(cx);
5872 let second = worktrees[1].read(cx);
5873 let third = worktrees[2].read(cx);
5874
5875 // check they are now in the right order
5876 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5877 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5878 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
5879 });
5880
5881 // move the third worktree to before the second
5882 // [a, c, b] -> [a, b, c]
5883 project
5884 .update(cx, |project, cx| {
5885 let third = worktree_c.read(cx);
5886 let second = worktree_b.read(cx);
5887 project.move_worktree(third.id(), second.id(), cx)
5888 })
5889 .expect("moving third before second");
5890
5891 // check the state after moving
5892 project.update(cx, |project, cx| {
5893 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5894 assert_eq!(worktrees.len(), 3);
5895
5896 let first = worktrees[0].read(cx);
5897 let second = worktrees[1].read(cx);
5898 let third = worktrees[2].read(cx);
5899
5900 // check they are now in the right order
5901 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5902 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5903 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5904 });
5905
5906 // move the first worktree to after the third
5907 // [a, b, c] -> [b, c, a]
5908 project
5909 .update(cx, |project, cx| {
5910 let first = worktree_a.read(cx);
5911 let third = worktree_c.read(cx);
5912 project.move_worktree(first.id(), third.id(), cx)
5913 })
5914 .expect("moving first after third");
5915
5916 // check the state after moving
5917 project.update(cx, |project, cx| {
5918 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5919 assert_eq!(worktrees.len(), 3);
5920
5921 let first = worktrees[0].read(cx);
5922 let second = worktrees[1].read(cx);
5923 let third = worktrees[2].read(cx);
5924
5925 // check they are now in the right order
5926 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
5927 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
5928 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
5929 });
5930
5931 // move the third worktree to before the first
5932 // [b, c, a] -> [a, b, c]
5933 project
5934 .update(cx, |project, cx| {
5935 let third = worktree_a.read(cx);
5936 let first = worktree_b.read(cx);
5937 project.move_worktree(third.id(), first.id(), cx)
5938 })
5939 .expect("moving third before first");
5940
5941 // check the state after moving
5942 project.update(cx, |project, cx| {
5943 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
5944 assert_eq!(worktrees.len(), 3);
5945
5946 let first = worktrees[0].read(cx);
5947 let second = worktrees[1].read(cx);
5948 let third = worktrees[2].read(cx);
5949
5950 // check they are now in the right order
5951 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
5952 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
5953 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
5954 });
5955}
5956
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that the unstaged diff (working copy vs. git index) is computed
    // when a buffer is opened, and is recomputed when the index changes.
    init_test(cx);

    // The index holds the original program; the working copy adds a comment
    // line and changes the printed string.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Both edits relative to the index should appear as hunks: the inserted
    // comment (added) and the changed println (modified).
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it already contains the comment but not the println
    // line; the diff's base text must be refreshed accordingly.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), staged_contents)],
    );

    // After the index change, only the println line remains unstaged.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
6054
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies the uncommitted diff (working copy vs. HEAD): secondary
    // (staged/unstaged) hunk status relative to the index, recomputation when
    // HEAD changes, and diffs for files deleted from the working copy.
    init_test(cx);

    // Three versions of the same file: committed (HEAD), staged (index), and
    // the working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index both also contain `deletion.rs`, which is absent
    // from the working tree (i.e. an as-yet-unstaged deletion).
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language so it can be
    // syntax highlighted.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment insertion is not yet staged (HasSecondaryHunk), while the
    // println change already matches the index (no secondary hunk).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deletion hunk; it still exists in the
    // index, so the deletion is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains (file still exists in HEAD) but it no longer
    // has a secondary hunk, since the index now agrees with the working copy.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
6232
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Verifies staging/unstaging of individual hunks: optimistic "pending"
    // status before the index write lands, the emitted diff events, rollback
    // when the index write fails, and batching of multiple staging calls.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index; the working copy deletes "zero" and upcases two lines,
    // producing three hunks (one deletion, two modifications).
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so their ordering can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the staged hunk is in the
        // transient SecondaryHunkRemovalPending state.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It is optimistically marked as pending even though
    // the index write is destined to fail.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6572
// NOTE(review): the pinned seeds presumably reproduce a past race between
// staging writes and delayed FS events — keep them when editing this test.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Verifies that staging hunks remains consistent when the FS events
    // confirming earlier index writes arrive late (events paused/flushed
    // manually), including staging further hunks in the interim.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as test_staging_hunks: one deletion and two modifications.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. With events paused, the pending state must
    // persist rather than resolving.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are still pending; the first must not be
        // forgotten just because a second staging operation started.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
6765
6766#[gpui::test]
6767async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
6768 use DiffHunkSecondaryStatus::*;
6769 init_test(cx);
6770
6771 let different_lines = (0..500)
6772 .step_by(5)
6773 .map(|i| format!("diff {}\n", i))
6774 .collect::<Vec<String>>();
6775 let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
6776 let file_contents = (0..500)
6777 .map(|i| {
6778 if i % 5 == 0 {
6779 different_lines[i / 5].clone()
6780 } else {
6781 format!("{}\n", i)
6782 }
6783 })
6784 .collect::<String>();
6785
6786 let fs = FakeFs::new(cx.background_executor.clone());
6787 fs.insert_tree(
6788 "/dir",
6789 json!({
6790 ".git": {},
6791 "file.txt": file_contents.clone()
6792 }),
6793 )
6794 .await;
6795
6796 fs.set_head_for_repo(
6797 "/dir/.git".as_ref(),
6798 &[("file.txt".into(), committed_contents.clone())],
6799 );
6800 fs.set_index_for_repo(
6801 "/dir/.git".as_ref(),
6802 &[("file.txt".into(), committed_contents.clone())],
6803 );
6804
6805 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6806
6807 let buffer = project
6808 .update(cx, |project, cx| {
6809 project.open_local_buffer("/dir/file.txt", cx)
6810 })
6811 .await
6812 .unwrap();
6813 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
6814 let uncommitted_diff = project
6815 .update(cx, |project, cx| {
6816 project.open_uncommitted_diff(buffer.clone(), cx)
6817 })
6818 .await
6819 .unwrap();
6820
6821 let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
6822 .step_by(5)
6823 .map(|i| {
6824 (
6825 i as u32..i as u32 + 1,
6826 format!("{}\n", i),
6827 different_lines[i / 5].clone(),
6828 DiffHunkStatus::modified(HasSecondaryHunk),
6829 )
6830 })
6831 .collect();
6832
6833 // The hunks are initially unstaged
6834 uncommitted_diff.read_with(cx, |diff, cx| {
6835 assert_hunks(
6836 diff.hunks(&snapshot, cx),
6837 &snapshot,
6838 &diff.base_text_string().unwrap(),
6839 &expected_hunks,
6840 );
6841 });
6842
6843 for (_, _, _, status) in expected_hunks.iter_mut() {
6844 *status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
6845 }
6846
6847 // Stage every hunk with a different call
6848 uncommitted_diff.update(cx, |diff, cx| {
6849 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
6850 for hunk in hunks {
6851 diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
6852 }
6853
6854 assert_hunks(
6855 diff.hunks(&snapshot, cx),
6856 &snapshot,
6857 &diff.base_text_string().unwrap(),
6858 &expected_hunks,
6859 );
6860 });
6861
6862 // If we wait, we'll have no pending hunks
6863 cx.run_until_parked();
6864 for (_, _, _, status) in expected_hunks.iter_mut() {
6865 *status = DiffHunkStatus::modified(NoSecondaryHunk);
6866 }
6867
6868 uncommitted_diff.update(cx, |diff, cx| {
6869 assert_hunks(
6870 diff.hunks(&snapshot, cx),
6871 &snapshot,
6872 &diff.base_text_string().unwrap(),
6873 &expected_hunks,
6874 );
6875 });
6876
6877 for (_, _, _, status) in expected_hunks.iter_mut() {
6878 *status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
6879 }
6880
6881 // Unstage every hunk with a different call
6882 uncommitted_diff.update(cx, |diff, cx| {
6883 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
6884 for hunk in hunks {
6885 diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
6886 }
6887
6888 assert_hunks(
6889 diff.hunks(&snapshot, cx),
6890 &snapshot,
6891 &diff.base_text_string().unwrap(),
6892 &expected_hunks,
6893 );
6894 });
6895
6896 // If we wait, we'll have no pending hunks, again
6897 cx.run_until_parked();
6898 for (_, _, _, status) in expected_hunks.iter_mut() {
6899 *status = DiffHunkStatus::modified(HasSecondaryHunk);
6900 }
6901
6902 uncommitted_diff.update(cx, |diff, cx| {
6903 assert_hunks(
6904 diff.hunks(&snapshot, cx),
6905 &snapshot,
6906 &diff.base_text_string().unwrap(),
6907 &expected_hunks,
6908 );
6909 });
6910}
6911
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    // Verifies that opening a *single file* as the project root (rather than a
    // directory worktree) still produces a working uncommitted diff against
    // the repository that lives above the file on disk.
    init_test(cx);

    // HEAD and working-copy contents differ by one println! line, so exactly
    // one modified hunk is expected below.
    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index agree, so the hunk should be unstaged
    // (HasSecondaryHunk) in the assertion at the end.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs".into(), committed_contents.clone())],
    );

    // Note: the project root is the file itself, not "/dir".
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let background diff computation settle before asserting.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        // A single modified, unstaged hunk covering the changed println! line.
        assert_hunks(
            uncommitted_diff.hunks(&snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
6984
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that project paths are mapped to the *innermost* containing git
    // repository (or none), and that the mapping is invalidated when a
    // repository's .git directory is removed.
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Layout: an outer repo at /root/dir1 with a nested dependency repo at
    // /root/dir1/deps/dep1; /root/c.txt is outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for the initial scan and any pending fs events so repositories are
    // fully discovered before querying.
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (project-relative path, expected (repo workdir, repo-relative path)).
        // c.txt has no containing repository; a.txt must resolve to the nested
        // dep1 repo rather than the outer dir1 repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).to_owned(), RepoPath::from(repo_path))
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, Path::new(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((
                        repo.read(cx)
                            .repository_entry
                            .work_directory_abs_path
                            .clone(),
                        repo_path,
                    ))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer repo's .git directory should remove its mapping:
    // b.txt no longer resolves to any repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, Path::new("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
7079
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    // Verifies the special-casing of a git repository rooted at the user's
    // home directory: it is ignored when the project is a subfolder of home,
    // but recognized when home itself is opened as the worktree root.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "home": {
                ".git": {},
                "project": {
                    "a.txt": "A"
                },
            },
        }),
    )
    .await;
    // Mark /root/home as the home directory so the git store can apply its
    // home-dir handling.
    fs.set_home_dir(Path::new(path!("/root/home")).to_owned());

    // Case 1: project rooted *inside* home — the home-dir repository must not
    // be used for files in the project.
    let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: home directory itself opened as the worktree root — now the
    // repository at home should be found for files beneath it.
    let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    tree.read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .repository_entry
                .work_directory_abs_path,
            Path::new(path!("/root/home"))
        );
    });
}
7136
7137async fn search(
7138 project: &Entity<Project>,
7139 query: SearchQuery,
7140 cx: &mut gpui::TestAppContext,
7141) -> Result<HashMap<String, Vec<Range<usize>>>> {
7142 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
7143 let mut results = HashMap::default();
7144 while let Ok(search_result) = search_rx.recv().await {
7145 match search_result {
7146 SearchResult::Buffer { buffer, ranges } => {
7147 results.entry(buffer).or_insert(ranges);
7148 }
7149 SearchResult::LimitReached => {}
7150 }
7151 }
7152 Ok(results
7153 .into_iter()
7154 .map(|(buffer, ranges)| {
7155 buffer.update(cx, |buffer, cx| {
7156 let path = buffer
7157 .file()
7158 .unwrap()
7159 .full_path(cx)
7160 .to_string_lossy()
7161 .to_string();
7162 let ranges = ranges
7163 .into_iter()
7164 .map(|range| range.to_offset(buffer))
7165 .collect::<Vec<_>>();
7166 (path, ranges)
7167 })
7168 })
7169 .collect())
7170}
7171
7172pub fn init_test(cx: &mut gpui::TestAppContext) {
7173 if std::env::var("RUST_LOG").is_ok() {
7174 env_logger::try_init().ok();
7175 }
7176
7177 cx.update(|cx| {
7178 let settings_store = SettingsStore::test(cx);
7179 cx.set_global(settings_store);
7180 release_channel::init(SemanticVersion::default(), cx);
7181 language::init(cx);
7182 Project::init_settings(cx);
7183 });
7184}
7185
7186fn json_lang() -> Arc<Language> {
7187 Arc::new(Language::new(
7188 LanguageConfig {
7189 name: "JSON".into(),
7190 matcher: LanguageMatcher {
7191 path_suffixes: vec!["json".to_string()],
7192 ..Default::default()
7193 },
7194 ..Default::default()
7195 },
7196 None,
7197 ))
7198}
7199
7200fn js_lang() -> Arc<Language> {
7201 Arc::new(Language::new(
7202 LanguageConfig {
7203 name: "JavaScript".into(),
7204 matcher: LanguageMatcher {
7205 path_suffixes: vec!["js".to_string()],
7206 ..Default::default()
7207 },
7208 ..Default::default()
7209 },
7210 None,
7211 ))
7212}
7213
7214fn rust_lang() -> Arc<Language> {
7215 Arc::new(Language::new(
7216 LanguageConfig {
7217 name: "Rust".into(),
7218 matcher: LanguageMatcher {
7219 path_suffixes: vec!["rs".to_string()],
7220 ..Default::default()
7221 },
7222 ..Default::default()
7223 },
7224 Some(tree_sitter_rust::LANGUAGE.into()),
7225 ))
7226}
7227
7228fn typescript_lang() -> Arc<Language> {
7229 Arc::new(Language::new(
7230 LanguageConfig {
7231 name: "TypeScript".into(),
7232 matcher: LanguageMatcher {
7233 path_suffixes: vec!["ts".to_string()],
7234 ..Default::default()
7235 },
7236 ..Default::default()
7237 },
7238 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
7239 ))
7240}
7241
7242fn tsx_lang() -> Arc<Language> {
7243 Arc::new(Language::new(
7244 LanguageConfig {
7245 name: "tsx".into(),
7246 matcher: LanguageMatcher {
7247 path_suffixes: vec!["tsx".to_string()],
7248 ..Default::default()
7249 },
7250 ..Default::default()
7251 },
7252 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
7253 ))
7254}
7255
7256fn get_all_tasks(
7257 project: &Entity<Project>,
7258 task_contexts: &TaskContexts,
7259 cx: &mut App,
7260) -> Vec<(TaskSourceKind, ResolvedTask)> {
7261 let (mut old, new) = project.update(cx, |project, cx| {
7262 project
7263 .task_store
7264 .read(cx)
7265 .task_inventory()
7266 .unwrap()
7267 .read(cx)
7268 .used_and_current_resolved_tasks(task_contexts, cx)
7269 });
7270 old.extend(new);
7271 old
7272}