1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use encodings::Encoding;
16use fs::FakeFs;
17use futures::{StreamExt, future};
18use git::{
19 GitHostingProviderRegistry,
20 repository::{RepoPath, repo_path},
21 status::{StatusCode, TrackedStatus},
22};
23use git2::RepositoryInitOptions;
24use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
25use itertools::Itertools;
26use language::{
27 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
28 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
29 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
30 ToolchainLister,
31 language_settings::{LanguageSettingsContent, language_settings},
32 tree_sitter_rust, tree_sitter_typescript,
33};
34use lsp::{
35 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
36 Uri, WillRenameFiles, notification::DidRenameFiles,
37};
38use parking_lot::Mutex;
39use paths::{config_dir, global_gitignore_path, tasks_file};
40use postage::stream::Stream as _;
41use pretty_assertions::{assert_eq, assert_matches};
42use rand::{Rng as _, rngs::StdRng};
43use serde_json::json;
44#[cfg(not(windows))]
45use std::os;
46use std::{
47 env, mem,
48 num::NonZeroU32,
49 ops::Range,
50 str::FromStr,
51 sync::{Arc, OnceLock},
52 task::Poll,
53};
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
// Verifies .editorconfig support and its layering rules:
// - .editorconfig values override .zed/settings.json,
// - a nested .editorconfig overrides an ancestor one,
// - `tab_width` is used when `indent_size` is absent,
// - `max_line_length = off` falls back to the .zed settings,
// - globs only affect matching files.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
            max_line_length = 120
        [*.js]
            tab_width = 10
            max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
                indent_size = 2
                max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project can watch it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings and language assignment propagate.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
// Verifies that custom git hosting providers declared in project settings
// are registered in the global registry, and unregistered again when the
// settings file no longer declares them.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Install the global provider registry before the project loads settings.
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare one custom provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider from settings should now be listed globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Rewrite the settings file without the custom provider.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The provider must be removed once settings no longer mention it.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
312
// Verifies directory-scoped `.zed/settings.json` and `.zed/tasks.json`:
// - nested settings override the worktree-root settings for files below them,
// - tasks from nested and root `.zed` directories are both surfaced,
// - a recently-scheduled task is sorted first on the next query,
// - global (file-based) tasks are appended after worktree tasks.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    // Root `.zed` defines tab_size 8 and a "cargo check all" task;
    // `b/.zed` overrides tab_size to 2 and defines a "cargo check" task.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Query tasks with the worktree as the active context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: a file under `a/` sees the root settings,
            // a file under `b/` sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled and add a global tasks.json entry.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The scheduled task is now first; the global task is appended last,
    // with its env carried through resolution.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
// Verifies that a task whose command references $ZED_WORKTREE_ROOT:
// - cannot be resolved when no worktree context supplies that variable,
// - resolves with the variable substituted once the active worktree
//   context provides it.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Only an active-item context, with no worktree context: the variable
    // cannot be filled in, so no task resolves.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context that defines WorktreeRoot, the task resolves
    // and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
// Verifies that one language server instance is shared across subprojects
// rooted by a manifest file (pyproject.toml) until a distinct toolchain is
// activated for one subproject, at which point a separate server instance
// is started for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: roots a subproject at the nearest ancestor
    // directory containing a pyproject.toml.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for the manifest.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects, each with its own pyproject.toml and .venv.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's manifest.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After activating a toolchain for project-b, its buffer should be
    // served by a fresh server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let fs = FakeFs::new(cx.executor());
1216 fs.insert_tree(
1217 path!("/the-root"),
1218 json!({
1219 ".gitignore": "target\n",
1220 "Cargo.lock": "",
1221 "src": {
1222 "a.rs": "",
1223 "b.rs": "",
1224 },
1225 "target": {
1226 "x": {
1227 "out": {
1228 "x.rs": ""
1229 }
1230 },
1231 "y": {
1232 "out": {
1233 "y.rs": "",
1234 }
1235 },
1236 "z": {
1237 "out": {
1238 "z.rs": ""
1239 }
1240 }
1241 }
1242 }),
1243 )
1244 .await;
1245 fs.insert_tree(
1246 path!("/the-registry"),
1247 json!({
1248 "dep1": {
1249 "src": {
1250 "dep1.rs": "",
1251 }
1252 },
1253 "dep2": {
1254 "src": {
1255 "dep2.rs": "",
1256 }
1257 },
1258 }),
1259 )
1260 .await;
1261 fs.insert_tree(
1262 path!("/the/stdlib"),
1263 json!({
1264 "LICENSE": "",
1265 "src": {
1266 "string.rs": "",
1267 }
1268 }),
1269 )
1270 .await;
1271
1272 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1273 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1274 (project.languages().clone(), project.lsp_store())
1275 });
1276 language_registry.add(rust_lang());
1277 let mut fake_servers = language_registry.register_fake_lsp(
1278 "Rust",
1279 FakeLspAdapter {
1280 name: "the-language-server",
1281 ..Default::default()
1282 },
1283 );
1284
1285 cx.executor().run_until_parked();
1286
1287 // Start the language server by opening a buffer with a compatible file extension.
1288 project
1289 .update(cx, |project, cx| {
1290 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1291 })
1292 .await
1293 .unwrap();
1294
1295 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1296 project.update(cx, |project, cx| {
1297 let worktree = project.worktrees(cx).next().unwrap();
1298 assert_eq!(
1299 worktree
1300 .read(cx)
1301 .snapshot()
1302 .entries(true, 0)
1303 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1304 .collect::<Vec<_>>(),
1305 &[
1306 ("", false),
1307 (".gitignore", false),
1308 ("Cargo.lock", false),
1309 ("src", false),
1310 ("src/a.rs", false),
1311 ("src/b.rs", false),
1312 ("target", true),
1313 ]
1314 );
1315 });
1316
1317 let prev_read_dir_count = fs.read_dir_call_count();
1318
1319 let fake_server = fake_servers.next().await.unwrap();
1320 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1321 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1322 id
1323 });
1324
1325 // Simulate jumping to a definition in a dependency outside of the worktree.
1326 let _out_of_worktree_buffer = project
1327 .update(cx, |project, cx| {
1328 project.open_local_buffer_via_lsp(
1329 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1330 server_id,
1331 cx,
1332 )
1333 })
1334 .await
1335 .unwrap();
1336
1337 // Keep track of the FS events reported to the language server.
1338 let file_changes = Arc::new(Mutex::new(Vec::new()));
1339 fake_server
1340 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1341 registrations: vec![lsp::Registration {
1342 id: Default::default(),
1343 method: "workspace/didChangeWatchedFiles".to_string(),
1344 register_options: serde_json::to_value(
1345 lsp::DidChangeWatchedFilesRegistrationOptions {
1346 watchers: vec![
1347 lsp::FileSystemWatcher {
1348 glob_pattern: lsp::GlobPattern::String(
1349 path!("/the-root/Cargo.toml").to_string(),
1350 ),
1351 kind: None,
1352 },
1353 lsp::FileSystemWatcher {
1354 glob_pattern: lsp::GlobPattern::String(
1355 path!("/the-root/src/*.{rs,c}").to_string(),
1356 ),
1357 kind: None,
1358 },
1359 lsp::FileSystemWatcher {
1360 glob_pattern: lsp::GlobPattern::String(
1361 path!("/the-root/target/y/**/*.rs").to_string(),
1362 ),
1363 kind: None,
1364 },
1365 lsp::FileSystemWatcher {
1366 glob_pattern: lsp::GlobPattern::String(
1367 path!("/the/stdlib/src/**/*.rs").to_string(),
1368 ),
1369 kind: None,
1370 },
1371 lsp::FileSystemWatcher {
1372 glob_pattern: lsp::GlobPattern::String(
1373 path!("**/Cargo.lock").to_string(),
1374 ),
1375 kind: None,
1376 },
1377 ],
1378 },
1379 )
1380 .ok(),
1381 }],
1382 })
1383 .await
1384 .into_response()
1385 .unwrap();
1386 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1387 let file_changes = file_changes.clone();
1388 move |params, _| {
1389 let mut file_changes = file_changes.lock();
1390 file_changes.extend(params.changes);
1391 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1392 }
1393 });
1394
1395 cx.executor().run_until_parked();
1396 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1397 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1398
1399 let mut new_watched_paths = fs.watched_paths();
1400 new_watched_paths.retain(|path| {
1401 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1402 });
1403 assert_eq!(
1404 &new_watched_paths,
1405 &[
1406 Path::new(path!("/the-root")),
1407 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1408 Path::new(path!("/the/stdlib/src"))
1409 ]
1410 );
1411
1412 // Now the language server has asked us to watch an ignored directory path,
1413 // so we recursively load it.
1414 project.update(cx, |project, cx| {
1415 let worktree = project.visible_worktrees(cx).next().unwrap();
1416 assert_eq!(
1417 worktree
1418 .read(cx)
1419 .snapshot()
1420 .entries(true, 0)
1421 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1422 .collect::<Vec<_>>(),
1423 &[
1424 ("", false),
1425 (".gitignore", false),
1426 ("Cargo.lock", false),
1427 ("src", false),
1428 ("src/a.rs", false),
1429 ("src/b.rs", false),
1430 ("target", true),
1431 ("target/x", true),
1432 ("target/y", true),
1433 ("target/y/out", true),
1434 ("target/y/out/y.rs", true),
1435 ("target/z", true),
1436 ]
1437 );
1438 });
1439
1440 // Perform some file system mutations, two of which match the watched patterns,
1441 // and one of which does not.
1442 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1443 .await
1444 .unwrap();
1445 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1446 .await
1447 .unwrap();
1448 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1449 .await
1450 .unwrap();
1451 fs.create_file(
1452 path!("/the-root/target/x/out/x2.rs").as_ref(),
1453 Default::default(),
1454 )
1455 .await
1456 .unwrap();
1457 fs.create_file(
1458 path!("/the-root/target/y/out/y2.rs").as_ref(),
1459 Default::default(),
1460 )
1461 .await
1462 .unwrap();
1463
1464 let encoding = Encoding::default();
1465
1466 fs.save(
1467 path!("/the-root/Cargo.lock").as_ref(),
1468 &Rope::default(),
1469 Default::default(),
1470 encoding.clone(),
1471 )
1472 .await
1473 .unwrap();
1474 fs.save(
1475 path!("/the-stdlib/LICENSE").as_ref(),
1476 &Rope::default(),
1477 Default::default(),
1478 encoding.clone(),
1479 )
1480 .await
1481 .unwrap();
1482 fs.save(
1483 path!("/the/stdlib/src/string.rs").as_ref(),
1484 &Rope::default(),
1485 Default::default(),
1486 encoding,
1487 )
1488 .await
1489 .unwrap();
1490
1491 // The language server receives events for the FS mutations that match its watch patterns.
1492 cx.executor().run_until_parked();
1493 assert_eq!(
1494 &*file_changes.lock(),
1495 &[
1496 lsp::FileEvent {
1497 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1498 typ: lsp::FileChangeType::CHANGED,
1499 },
1500 lsp::FileEvent {
1501 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1502 typ: lsp::FileChangeType::DELETED,
1503 },
1504 lsp::FileEvent {
1505 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1506 typ: lsp::FileChangeType::CREATED,
1507 },
1508 lsp::FileEvent {
1509 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1510 typ: lsp::FileChangeType::CREATED,
1511 },
1512 lsp::FileEvent {
1513 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1514 typ: lsp::FileChangeType::CHANGED,
1515 },
1516 ]
1517 );
1518}
1519
1520#[gpui::test]
1521async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1522 init_test(cx);
1523
1524 let fs = FakeFs::new(cx.executor());
1525 fs.insert_tree(
1526 path!("/dir"),
1527 json!({
1528 "a.rs": "let a = 1;",
1529 "b.rs": "let b = 2;"
1530 }),
1531 )
1532 .await;
1533
1534 let project = Project::test(
1535 fs,
1536 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1537 cx,
1538 )
1539 .await;
1540 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1541
1542 let buffer_a = project
1543 .update(cx, |project, cx| {
1544 project.open_local_buffer(path!("/dir/a.rs"), cx)
1545 })
1546 .await
1547 .unwrap();
1548 let buffer_b = project
1549 .update(cx, |project, cx| {
1550 project.open_local_buffer(path!("/dir/b.rs"), cx)
1551 })
1552 .await
1553 .unwrap();
1554
1555 lsp_store.update(cx, |lsp_store, cx| {
1556 lsp_store
1557 .update_diagnostics(
1558 LanguageServerId(0),
1559 lsp::PublishDiagnosticsParams {
1560 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1561 version: None,
1562 diagnostics: vec![lsp::Diagnostic {
1563 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1564 severity: Some(lsp::DiagnosticSeverity::ERROR),
1565 message: "error 1".to_string(),
1566 ..Default::default()
1567 }],
1568 },
1569 None,
1570 DiagnosticSourceKind::Pushed,
1571 &[],
1572 cx,
1573 )
1574 .unwrap();
1575 lsp_store
1576 .update_diagnostics(
1577 LanguageServerId(0),
1578 lsp::PublishDiagnosticsParams {
1579 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1580 version: None,
1581 diagnostics: vec![lsp::Diagnostic {
1582 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1583 severity: Some(DiagnosticSeverity::WARNING),
1584 message: "error 2".to_string(),
1585 ..Default::default()
1586 }],
1587 },
1588 None,
1589 DiagnosticSourceKind::Pushed,
1590 &[],
1591 cx,
1592 )
1593 .unwrap();
1594 });
1595
1596 buffer_a.update(cx, |buffer, _| {
1597 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1598 assert_eq!(
1599 chunks
1600 .iter()
1601 .map(|(s, d)| (s.as_str(), *d))
1602 .collect::<Vec<_>>(),
1603 &[
1604 ("let ", None),
1605 ("a", Some(DiagnosticSeverity::ERROR)),
1606 (" = 1;", None),
1607 ]
1608 );
1609 });
1610 buffer_b.update(cx, |buffer, _| {
1611 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1612 assert_eq!(
1613 chunks
1614 .iter()
1615 .map(|(s, d)| (s.as_str(), *d))
1616 .collect::<Vec<_>>(),
1617 &[
1618 ("let ", None),
1619 ("b", Some(DiagnosticSeverity::WARNING)),
1620 (" = 2;", None),
1621 ]
1622 );
1623 });
1624}
1625
1626#[gpui::test]
1627async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1628 init_test(cx);
1629
1630 let fs = FakeFs::new(cx.executor());
1631 fs.insert_tree(
1632 path!("/root"),
1633 json!({
1634 "dir": {
1635 ".git": {
1636 "HEAD": "ref: refs/heads/main",
1637 },
1638 ".gitignore": "b.rs",
1639 "a.rs": "let a = 1;",
1640 "b.rs": "let b = 2;",
1641 },
1642 "other.rs": "let b = c;"
1643 }),
1644 )
1645 .await;
1646
1647 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1648 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1649 let (worktree, _) = project
1650 .update(cx, |project, cx| {
1651 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1652 })
1653 .await
1654 .unwrap();
1655 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1656
1657 let (worktree, _) = project
1658 .update(cx, |project, cx| {
1659 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1660 })
1661 .await
1662 .unwrap();
1663 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1664
1665 let server_id = LanguageServerId(0);
1666 lsp_store.update(cx, |lsp_store, cx| {
1667 lsp_store
1668 .update_diagnostics(
1669 server_id,
1670 lsp::PublishDiagnosticsParams {
1671 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1672 version: None,
1673 diagnostics: vec![lsp::Diagnostic {
1674 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1675 severity: Some(lsp::DiagnosticSeverity::ERROR),
1676 message: "unused variable 'b'".to_string(),
1677 ..Default::default()
1678 }],
1679 },
1680 None,
1681 DiagnosticSourceKind::Pushed,
1682 &[],
1683 cx,
1684 )
1685 .unwrap();
1686 lsp_store
1687 .update_diagnostics(
1688 server_id,
1689 lsp::PublishDiagnosticsParams {
1690 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1691 version: None,
1692 diagnostics: vec![lsp::Diagnostic {
1693 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1694 severity: Some(lsp::DiagnosticSeverity::ERROR),
1695 message: "unknown variable 'c'".to_string(),
1696 ..Default::default()
1697 }],
1698 },
1699 None,
1700 DiagnosticSourceKind::Pushed,
1701 &[],
1702 cx,
1703 )
1704 .unwrap();
1705 });
1706
1707 let main_ignored_buffer = project
1708 .update(cx, |project, cx| {
1709 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1710 })
1711 .await
1712 .unwrap();
1713 main_ignored_buffer.update(cx, |buffer, _| {
1714 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1715 assert_eq!(
1716 chunks
1717 .iter()
1718 .map(|(s, d)| (s.as_str(), *d))
1719 .collect::<Vec<_>>(),
1720 &[
1721 ("let ", None),
1722 ("b", Some(DiagnosticSeverity::ERROR)),
1723 (" = 2;", None),
1724 ],
1725 "Gigitnored buffers should still get in-buffer diagnostics",
1726 );
1727 });
1728 let other_buffer = project
1729 .update(cx, |project, cx| {
1730 project.open_buffer((other_worktree_id, rel_path("")), cx)
1731 })
1732 .await
1733 .unwrap();
1734 other_buffer.update(cx, |buffer, _| {
1735 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1736 assert_eq!(
1737 chunks
1738 .iter()
1739 .map(|(s, d)| (s.as_str(), *d))
1740 .collect::<Vec<_>>(),
1741 &[
1742 ("let b = ", None),
1743 ("c", Some(DiagnosticSeverity::ERROR)),
1744 (";", None),
1745 ],
1746 "Buffers from hidden projects should still get in-buffer diagnostics"
1747 );
1748 });
1749
1750 project.update(cx, |project, cx| {
1751 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1752 assert_eq!(
1753 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1754 vec![(
1755 ProjectPath {
1756 worktree_id: main_worktree_id,
1757 path: rel_path("b.rs").into(),
1758 },
1759 server_id,
1760 DiagnosticSummary {
1761 error_count: 1,
1762 warning_count: 0,
1763 }
1764 )]
1765 );
1766 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1767 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1768 });
1769}
1770
// Disk-based diagnostics are reported through an LSP `$/progress` token that
// the adapter declares; this test checks that the corresponding project events
// (DiskBasedDiagnosticsStarted / DiagnosticsUpdated / DiskBasedDiagnosticsFinished)
// fire in order, and that re-publishing identical empty diagnostics does not
// emit a redundant update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake adapter advertises as its disk-based-diagnostics progress token.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before driving the server further, so every
    // subsequent event is observed.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token yields an inlay-hint
    // refresh followed by DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic for a file that is not yet open; this should still
    // surface a DiagnosticsUpdated event for its project path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the disk-based progress token finishes the diagnostics pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards shows the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical (empty) publish must produce no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1910
// Restarting a language server while its disk-based diagnostics are still in
// flight must not leave the project stuck: the old server's unfinished
// progress is dropped, and once the replacement server finishes, no server is
// reported as running disk-based diagnostics.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed and the replacement (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2014
2015#[gpui::test]
2016async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2017 init_test(cx);
2018
2019 let fs = FakeFs::new(cx.executor());
2020 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2021
2022 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2023
2024 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2025 language_registry.add(rust_lang());
2026 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2027
2028 let (buffer, _) = project
2029 .update(cx, |project, cx| {
2030 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2031 })
2032 .await
2033 .unwrap();
2034
2035 // Publish diagnostics
2036 let fake_server = fake_servers.next().await.unwrap();
2037 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2038 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2039 version: None,
2040 diagnostics: vec![lsp::Diagnostic {
2041 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2042 severity: Some(lsp::DiagnosticSeverity::ERROR),
2043 message: "the message".to_string(),
2044 ..Default::default()
2045 }],
2046 });
2047
2048 cx.executor().run_until_parked();
2049 buffer.update(cx, |buffer, _| {
2050 assert_eq!(
2051 buffer
2052 .snapshot()
2053 .diagnostics_in_range::<_, usize>(0..1, false)
2054 .map(|entry| entry.diagnostic.message.clone())
2055 .collect::<Vec<_>>(),
2056 ["the message".to_string()]
2057 );
2058 });
2059 project.update(cx, |project, cx| {
2060 assert_eq!(
2061 project.diagnostic_summary(false, cx),
2062 DiagnosticSummary {
2063 error_count: 1,
2064 warning_count: 0,
2065 }
2066 );
2067 });
2068
2069 project.update(cx, |project, cx| {
2070 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2071 });
2072
2073 // The diagnostics are cleared.
2074 cx.executor().run_until_parked();
2075 buffer.update(cx, |buffer, _| {
2076 assert_eq!(
2077 buffer
2078 .snapshot()
2079 .diagnostics_in_range::<_, usize>(0..1, false)
2080 .map(|entry| entry.diagnostic.message.clone())
2081 .collect::<Vec<_>>(),
2082 Vec::<String>::new(),
2083 );
2084 });
2085 project.update(cx, |project, cx| {
2086 assert_eq!(
2087 project.diagnostic_summary(false, cx),
2088 DiagnosticSummary {
2089 error_count: 0,
2090 warning_count: 0,
2091 }
2092 );
2093 });
2094}
2095
2096#[gpui::test]
2097async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2098 init_test(cx);
2099
2100 let fs = FakeFs::new(cx.executor());
2101 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2102
2103 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2104 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2105
2106 language_registry.add(rust_lang());
2107 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2108
2109 let (buffer, _handle) = project
2110 .update(cx, |project, cx| {
2111 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2112 })
2113 .await
2114 .unwrap();
2115
2116 // Before restarting the server, report diagnostics with an unknown buffer version.
2117 let fake_server = fake_servers.next().await.unwrap();
2118 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2119 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2120 version: Some(10000),
2121 diagnostics: Vec::new(),
2122 });
2123 cx.executor().run_until_parked();
2124 project.update(cx, |project, cx| {
2125 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2126 });
2127
2128 let mut fake_server = fake_servers.next().await.unwrap();
2129 let notification = fake_server
2130 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2131 .await
2132 .text_document;
2133 assert_eq!(notification.version, 0);
2134}
2135
2136#[gpui::test]
2137async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2138 init_test(cx);
2139
2140 let progress_token = "the-progress-token";
2141
2142 let fs = FakeFs::new(cx.executor());
2143 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2144
2145 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2146
2147 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2148 language_registry.add(rust_lang());
2149 let mut fake_servers = language_registry.register_fake_lsp(
2150 "Rust",
2151 FakeLspAdapter {
2152 name: "the-language-server",
2153 disk_based_diagnostics_sources: vec!["disk".into()],
2154 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2155 ..Default::default()
2156 },
2157 );
2158
2159 let (buffer, _handle) = project
2160 .update(cx, |project, cx| {
2161 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2162 })
2163 .await
2164 .unwrap();
2165
2166 // Simulate diagnostics starting to update.
2167 let mut fake_server = fake_servers.next().await.unwrap();
2168 fake_server
2169 .start_progress_with(
2170 "another-token",
2171 lsp::WorkDoneProgressBegin {
2172 cancellable: Some(false),
2173 ..Default::default()
2174 },
2175 )
2176 .await;
2177 fake_server
2178 .start_progress_with(
2179 progress_token,
2180 lsp::WorkDoneProgressBegin {
2181 cancellable: Some(true),
2182 ..Default::default()
2183 },
2184 )
2185 .await;
2186 cx.executor().run_until_parked();
2187
2188 project.update(cx, |project, cx| {
2189 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2190 });
2191
2192 let cancel_notification = fake_server
2193 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2194 .await;
2195 assert_eq!(
2196 cancel_notification.token,
2197 NumberOrString::String(progress_token.into())
2198 );
2199}
2200
// Toggling the per-language `enable_language_server` setting must stop or
// start only the affected language's server, leaving other languages' servers
// untouched. Note the second settings update intentionally flips both
// languages in one `update_user_settings` call, so both transitions happen
// from a single settings change.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language, so we can observe each independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts up and reopens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2318
// Exercises how pushed LSP diagnostics are anchored into the buffer and
// transformed by later edits: diagnostics reported against an older document
// version must be translated through edits made since that version,
// overlapping diagnostics must be highlighted correctly, and diagnostics
// whose ranges arrive out of order must still be sorted by position.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Mark the "disk" source as disk-based so the entries below get
    // `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every range down by two rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning sorts before the nested error at the same start.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // The ERROR takes precedence over the WARNING where they overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by buffer position despite the reversed
        // order in the notification, with ranges reflecting the latest edits.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2610
// Checks how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the following character, and at end-of-line it is
// extended backward to cover the preceding character instead.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store: one
    // mid-line (before `;`) and one at the end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2686
// Ensures diagnostics from distinct language servers for the same file are
// tracked per-server: identical ranges from two server IDs both count toward
// the project's diagnostic summary rather than replacing each other.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same path and range, but reported by LanguageServerId(0)...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and by LanguageServerId(1).
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' diagnostics are counted.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2747
// Verifies `edits_from_lsp` when the server's edits target a *past* document
// version: the edits must be interpreted against that older snapshot and then
// translated through the buffer edits made since, so user edits interleaved
// with server-computed edits are preserved.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw at open time; the LSP edits below
    // will be applied relative to this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP ranges below are expressed in coordinates of the *original*
    // (pre-edit) text, as indicated by `lsp_document_version`.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves the user's comments while
    // landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2902
// Verifies that `edits_from_lsp` minimizes a large server-sent diff down to
// the small logical change it represents (the way rust-analyzer reports a
// merge-imports action as a whole-file rewrite).
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The whole-file rewrite collapses to two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3013
// Verifies that `edits_from_lsp` tolerates a spec-violating edit pair: an
// insertion at the same position that *follows* a replacement. Both edits
// must still be applied, yielding the combined result.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3069
// Verifies that `edits_from_lsp` copes with malformed server edits: unordered
// edits, an inverted range (end before start), and a range extending past the
// end of the file. The result should still be the minimal, correct edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends past the last line of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal result as the well-formed variant of this test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3176
3177fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3178 buffer: &Buffer,
3179 range: Range<T>,
3180) -> Vec<(String, Option<DiagnosticSeverity>)> {
3181 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3182 for chunk in buffer.snapshot().chunks(range, true) {
3183 if chunks
3184 .last()
3185 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3186 {
3187 chunks.last_mut().unwrap().0.push_str(chunk.text);
3188 } else {
3189 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3190 }
3191 }
3192 chunks
3193}
3194
// Exercises go-to-definition into a file outside the project's visible
// worktree: the target file is opened in an invisible worktree that is
// released once the last handle to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The server resolves the definition to a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs is held in an invisible worktree while the definition is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3293
// Verifies that when a completion item carries an explicit `text_edit`, that
// edit's new text and range are used for the completion — taking precedence
// over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item's text_edit replaces the trailing "fqn" with "textEditText".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3376
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Exercises completion resolution when the server omits per-item
    // `text_edit`s and instead supplies a shared default `edit_range` via
    // `CompletionList::item_defaults`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the completion request first; the handler is installed below,
        // and `set_request_handler(...).next().await` resolves once it has
        // served one request (this order is the pattern used throughout this
        // file).
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range spans the last three characters of the
                        // buffer text ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` is used as the new text, applied over the default
        // edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        // Deliberately set so the assertions below prove it is
                        // NOT used when a default edit range is present.
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no edit text available, the item's label becomes the new text,
        // still applied over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3513
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Exercises completion resolution when the server provides neither a
    // per-item `text_edit` nor a default `edit_range`: the replace range must
    // be inferred from the text around the cursor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // The request is initiated before the handler is installed; the handler's
    // `.next().await` below resolves after it has served one request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    // Label differs from the insert text; `insert_text`
                    // supplies the actual replacement.
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers "fqn" — the three characters before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With nothing else available, the label is used as the new text.
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" — the three characters preceding the
    // cursor, stopping before the closing quote.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3619
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // A completion's `insert_text` containing bare "\r" or "\r\n" line
    // endings must be normalized to "\n" before it is surfaced to the editor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Initiate the request before installing the handler, matching the
    // request/handler pattern used throughout this file.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed "\r" and "\r\n" line endings in the server's text.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" are normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3687
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end flow for a code action backed by a command rather than edits:
    // request actions -> resolve (which attaches the command) -> execute the
    // command -> the server pushes a `workspace/applyEdit` -> the resulting
    // edits come back to the caller as a `ProjectTransaction`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Actions must be resolved before applying, letting
                        // the server defer attaching the command.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // The `data` payload is what the resolve handler below
                    // keys on to attach the command.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action"), the one carrying `data`.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        // The edit was applied and is undoable as a single transaction.
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3829
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    // Renaming an entry into a directory hierarchy that does not exist yet
    // must create the intermediate directories and preserve the file's
    // contents; a follow-up rename into an existing directory must also work.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file three directory levels deep; none of the directories
    // exist beforehand.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), &Default::default(), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look up the moved entry's id at its new path before renaming again.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move the file up one level, into a directory that
    // already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), &Default::default(), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3937
3938#[gpui::test(iterations = 10)]
3939async fn test_save_file(cx: &mut gpui::TestAppContext) {
3940 init_test(cx);
3941
3942 let fs = FakeFs::new(cx.executor());
3943 fs.insert_tree(
3944 path!("/dir"),
3945 json!({
3946 "file1": "the old contents",
3947 }),
3948 )
3949 .await;
3950
3951 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3952 let buffer = project
3953 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3954 .await
3955 .unwrap();
3956 buffer.update(cx, |buffer, cx| {
3957 assert_eq!(buffer.text(), "the old contents");
3958 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3959 });
3960
3961 project
3962 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3963 .await
3964 .unwrap();
3965
3966 let new_text = fs
3967 .load(Path::new(path!("/dir/file1")))
3968 .await
3969 .unwrap()
3970 .replace("\r\n", "\n");
3971 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3972}
3973
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // An untitled buffer starts with no language servers; saving it under a
    // path with a recognized extension must start the matching server and
    // send it a `didOpen` for the new file.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled (pathless) buffer.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            // No file on disk yet, so no language server applies.
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` path inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        // The server sees the saved file with its (empty) initial contents.
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            // After the save, the buffer is served by the new language server.
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4053
4054#[gpui::test(iterations = 30)]
4055async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4056 init_test(cx);
4057
4058 let fs = FakeFs::new(cx.executor());
4059 fs.insert_tree(
4060 path!("/dir"),
4061 json!({
4062 "file1": "the original contents",
4063 }),
4064 )
4065 .await;
4066
4067 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4068 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4069 let buffer = project
4070 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4071 .await
4072 .unwrap();
4073
4074 // Simulate buffer diffs being slow, so that they don't complete before
4075 // the next file change occurs.
4076 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4077
4078 // Change the buffer's file on disk, and then wait for the file change
4079 // to be detected by the worktree, so that the buffer starts reloading.
4080 fs.save(
4081 path!("/dir/file1").as_ref(),
4082 &Rope::from_str("the first contents", cx.background_executor()),
4083 Default::default(),
4084 Default::default(),
4085 )
4086 .await
4087 .unwrap();
4088 worktree.next_event(cx).await;
4089
4090 // Change the buffer's file again. Depending on the random seed, the
4091 // previous file change may still be in progress.
4092 fs.save(
4093 path!("/dir/file1").as_ref(),
4094 &Rope::from_str("the second contents", cx.background_executor()),
4095 Default::default(),
4096 Default::default(),
4097 )
4098 .await
4099 .unwrap();
4100 worktree.next_event(cx).await;
4101
4102 cx.executor().run_until_parked();
4103 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4104 buffer.read_with(cx, |buffer, _| {
4105 assert_eq!(buffer.text(), on_disk_text);
4106 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4107 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4108 });
4109}
4110
4111#[gpui::test(iterations = 30)]
4112async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4113 init_test(cx);
4114
4115 let fs = FakeFs::new(cx.executor());
4116 fs.insert_tree(
4117 path!("/dir"),
4118 json!({
4119 "file1": "the original contents",
4120 }),
4121 )
4122 .await;
4123
4124 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4125 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4126 let buffer = project
4127 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4128 .await
4129 .unwrap();
4130
4131 // Simulate buffer diffs being slow, so that they don't complete before
4132 // the next file change occurs.
4133 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4134
4135 // Change the buffer's file on disk, and then wait for the file change
4136 // to be detected by the worktree, so that the buffer starts reloading.
4137 fs.save(
4138 path!("/dir/file1").as_ref(),
4139 &Rope::from_str("the first contents", cx.background_executor()),
4140 Default::default(),
4141 Default::default(),
4142 )
4143 .await
4144 .unwrap();
4145 worktree.next_event(cx).await;
4146
4147 cx.executor()
4148 .spawn(cx.executor().simulate_random_delay())
4149 .await;
4150
4151 // Perform a noop edit, causing the buffer's version to increase.
4152 buffer.update(cx, |buffer, cx| {
4153 buffer.edit([(0..0, " ")], None, cx);
4154 buffer.undo(cx);
4155 });
4156
4157 cx.executor().run_until_parked();
4158 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4159 buffer.read_with(cx, |buffer, _| {
4160 let buffer_text = buffer.text();
4161 if buffer_text == on_disk_text {
4162 assert!(
4163 !buffer.is_dirty() && !buffer.has_conflict(),
4164 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4165 );
4166 }
4167 // If the file change occurred while the buffer was processing the first
4168 // change, the buffer will be in a conflicting state.
4169 else {
4170 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4171 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4172 }
4173 });
4174}
4175
4176#[gpui::test]
4177async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4178 init_test(cx);
4179
4180 let fs = FakeFs::new(cx.executor());
4181 fs.insert_tree(
4182 path!("/dir"),
4183 json!({
4184 "file1": "the old contents",
4185 }),
4186 )
4187 .await;
4188
4189 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4190 let buffer = project
4191 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4192 .await
4193 .unwrap();
4194 buffer.update(cx, |buffer, cx| {
4195 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4196 });
4197
4198 project
4199 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4200 .await
4201 .unwrap();
4202
4203 let new_text = fs
4204 .load(Path::new(path!("/dir/file1")))
4205 .await
4206 .unwrap()
4207 .replace("\r\n", "\n");
4208 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4209}
4210
4211#[gpui::test]
4212async fn test_save_as(cx: &mut gpui::TestAppContext) {
4213 init_test(cx);
4214
4215 let fs = FakeFs::new(cx.executor());
4216 fs.insert_tree("/dir", json!({})).await;
4217
4218 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4219
4220 let languages = project.update(cx, |project, _| project.languages().clone());
4221 languages.add(rust_lang());
4222
4223 let buffer = project.update(cx, |project, cx| {
4224 project.create_local_buffer("", None, false, cx)
4225 });
4226 buffer.update(cx, |buffer, cx| {
4227 buffer.edit([(0..0, "abc")], None, cx);
4228 assert!(buffer.is_dirty());
4229 assert!(!buffer.has_conflict());
4230 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4231 });
4232 project
4233 .update(cx, |project, cx| {
4234 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4235 let path = ProjectPath {
4236 worktree_id,
4237 path: rel_path("file1.rs").into(),
4238 };
4239 project.save_buffer_as(buffer.clone(), path, cx)
4240 })
4241 .await
4242 .unwrap();
4243 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4244
4245 cx.executor().run_until_parked();
4246 buffer.update(cx, |buffer, cx| {
4247 assert_eq!(
4248 buffer.file().unwrap().full_path(cx),
4249 Path::new("dir/file1.rs")
4250 );
4251 assert!(!buffer.is_dirty());
4252 assert!(!buffer.has_conflict());
4253 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4254 });
4255
4256 let opened_buffer = project
4257 .update(cx, |project, cx| {
4258 project.open_local_buffer("/dir/file1.rs", cx)
4259 })
4260 .await
4261 .unwrap();
4262 assert_eq!(opened_buffer, buffer);
4263}
4264
4265#[gpui::test]
4266async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4267 init_test(cx);
4268
4269 let fs = FakeFs::new(cx.executor());
4270 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4271
4272 fs.insert_tree(
4273 path!("/dir"),
4274 json!({
4275 "data_a.txt": "data about a"
4276 }),
4277 )
4278 .await;
4279
4280 let buffer = project
4281 .update(cx, |project, cx| {
4282 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4283 })
4284 .await
4285 .unwrap();
4286
4287 buffer.update(cx, |buffer, cx| {
4288 buffer.edit([(11..12, "b")], None, cx);
4289 });
4290
4291 // Save buffer's contents as a new file and confirm that the buffer's now
4292 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4293 // file associated with the buffer has now been updated to `data_b.txt`
4294 project
4295 .update(cx, |project, cx| {
4296 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4297 let new_path = ProjectPath {
4298 worktree_id,
4299 path: rel_path("data_b.txt").into(),
4300 };
4301
4302 project.save_buffer_as(buffer.clone(), new_path, cx)
4303 })
4304 .await
4305 .unwrap();
4306
4307 buffer.update(cx, |buffer, cx| {
4308 assert_eq!(
4309 buffer.file().unwrap().full_path(cx),
4310 Path::new("dir/data_b.txt")
4311 )
4312 });
4313
4314 // Open the original `data_a.txt` file, confirming that its contents are
4315 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4316 let original_buffer = project
4317 .update(cx, |project, cx| {
4318 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4319 })
4320 .await
4321 .unwrap();
4322
4323 original_buffer.update(cx, |buffer, cx| {
4324 assert_eq!(buffer.text(), "data about a");
4325 assert_eq!(
4326 buffer.file().unwrap().full_path(cx),
4327 Path::new("dir/data_a.txt")
4328 )
4329 });
4330}
4331
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Mutates a real (temp-dir) worktree on disk with renames and deletions,
    // then verifies: entry ids are stable across renames, open buffers track
    // their files' new paths, and a remote worktree replica fed the observed
    // updates converges to the same set of paths.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS events require parking the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Capture the pre-rename entry ids so we can assert they survive.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, to replay on the
    // remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are preserved across renames (including the parent-directory
    // rename of b/c -> d).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while the deleted file's buffer keeps its last known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4499
/// Regression test: worktree entry ids and open buffers must survive a
/// directory rename. After renaming `a` -> `b`, the ids of the directory
/// and the file inside it are preserved, and the open buffer stays clean.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the worktree entry id for a path, panicking if the entry
    // does not exist (which would itself be a test failure).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory that contains the open buffer's file.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids are preserved across the rename, and the buffer remains
    // clean since its contents never changed.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4553
4554#[gpui::test]
4555async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4556 init_test(cx);
4557
4558 let fs = FakeFs::new(cx.executor());
4559 fs.insert_tree(
4560 "/dir",
4561 json!({
4562 "a.txt": "a-contents",
4563 "b.txt": "b-contents",
4564 }),
4565 )
4566 .await;
4567
4568 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4569
4570 // Spawn multiple tasks to open paths, repeating some paths.
4571 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4572 (
4573 p.open_local_buffer("/dir/a.txt", cx),
4574 p.open_local_buffer("/dir/b.txt", cx),
4575 p.open_local_buffer("/dir/a.txt", cx),
4576 )
4577 });
4578
4579 let buffer_a_1 = buffer_a_1.await.unwrap();
4580 let buffer_a_2 = buffer_a_2.await.unwrap();
4581 let buffer_b = buffer_b.await.unwrap();
4582 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4583 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4584
4585 // There is only one buffer per path.
4586 let buffer_a_id = buffer_a_1.entity_id();
4587 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4588
4589 // Open the same path again while it is still open.
4590 drop(buffer_a_1);
4591 let buffer_a_3 = project
4592 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4593 .await
4594 .unwrap();
4595
4596 // There's still only one buffer per path.
4597 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4598}
4599
4600#[gpui::test]
4601async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4602 init_test(cx);
4603
4604 let fs = FakeFs::new(cx.executor());
4605 fs.insert_tree(
4606 path!("/dir"),
4607 json!({
4608 "file1": "abc",
4609 "file2": "def",
4610 "file3": "ghi",
4611 }),
4612 )
4613 .await;
4614
4615 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4616
4617 let buffer1 = project
4618 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4619 .await
4620 .unwrap();
4621 let events = Arc::new(Mutex::new(Vec::new()));
4622
4623 // initially, the buffer isn't dirty.
4624 buffer1.update(cx, |buffer, cx| {
4625 cx.subscribe(&buffer1, {
4626 let events = events.clone();
4627 move |_, _, event, _| match event {
4628 BufferEvent::Operation { .. } => {}
4629 _ => events.lock().push(event.clone()),
4630 }
4631 })
4632 .detach();
4633
4634 assert!(!buffer.is_dirty());
4635 assert!(events.lock().is_empty());
4636
4637 buffer.edit([(1..2, "")], None, cx);
4638 });
4639
4640 // after the first edit, the buffer is dirty, and emits a dirtied event.
4641 buffer1.update(cx, |buffer, cx| {
4642 assert!(buffer.text() == "ac");
4643 assert!(buffer.is_dirty());
4644 assert_eq!(
4645 *events.lock(),
4646 &[
4647 language::BufferEvent::Edited,
4648 language::BufferEvent::DirtyChanged
4649 ]
4650 );
4651 events.lock().clear();
4652 buffer.did_save(
4653 buffer.version(),
4654 buffer.file().unwrap().disk_state().mtime(),
4655 cx,
4656 );
4657 });
4658
4659 // after saving, the buffer is not dirty, and emits a saved event.
4660 buffer1.update(cx, |buffer, cx| {
4661 assert!(!buffer.is_dirty());
4662 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4663 events.lock().clear();
4664
4665 buffer.edit([(1..1, "B")], None, cx);
4666 buffer.edit([(2..2, "D")], None, cx);
4667 });
4668
4669 // after editing again, the buffer is dirty, and emits another dirty event.
4670 buffer1.update(cx, |buffer, cx| {
4671 assert!(buffer.text() == "aBDc");
4672 assert!(buffer.is_dirty());
4673 assert_eq!(
4674 *events.lock(),
4675 &[
4676 language::BufferEvent::Edited,
4677 language::BufferEvent::DirtyChanged,
4678 language::BufferEvent::Edited,
4679 ],
4680 );
4681 events.lock().clear();
4682
4683 // After restoring the buffer to its previously-saved state,
4684 // the buffer is not considered dirty anymore.
4685 buffer.edit([(1..3, "")], None, cx);
4686 assert!(buffer.text() == "ac");
4687 assert!(!buffer.is_dirty());
4688 });
4689
4690 assert_eq!(
4691 *events.lock(),
4692 &[
4693 language::BufferEvent::Edited,
4694 language::BufferEvent::DirtyChanged
4695 ]
4696 );
4697
4698 // When a file is deleted, it is not considered dirty.
4699 let events = Arc::new(Mutex::new(Vec::new()));
4700 let buffer2 = project
4701 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4702 .await
4703 .unwrap();
4704 buffer2.update(cx, |_, cx| {
4705 cx.subscribe(&buffer2, {
4706 let events = events.clone();
4707 move |_, _, event, _| match event {
4708 BufferEvent::Operation { .. } => {}
4709 _ => events.lock().push(event.clone()),
4710 }
4711 })
4712 .detach();
4713 });
4714
4715 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4716 .await
4717 .unwrap();
4718 cx.executor().run_until_parked();
4719 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4720 assert_eq!(
4721 mem::take(&mut *events.lock()),
4722 &[language::BufferEvent::FileHandleChanged]
4723 );
4724
4725 // Buffer becomes dirty when edited.
4726 buffer2.update(cx, |buffer, cx| {
4727 buffer.edit([(2..3, "")], None, cx);
4728 assert_eq!(buffer.is_dirty(), true);
4729 });
4730 assert_eq!(
4731 mem::take(&mut *events.lock()),
4732 &[
4733 language::BufferEvent::Edited,
4734 language::BufferEvent::DirtyChanged
4735 ]
4736 );
4737
4738 // Buffer becomes clean again when all of its content is removed, because
4739 // the file was deleted.
4740 buffer2.update(cx, |buffer, cx| {
4741 buffer.edit([(0..2, "")], None, cx);
4742 assert_eq!(buffer.is_empty(), true);
4743 assert_eq!(buffer.is_dirty(), false);
4744 });
4745 assert_eq!(
4746 *events.lock(),
4747 &[
4748 language::BufferEvent::Edited,
4749 language::BufferEvent::DirtyChanged
4750 ]
4751 );
4752
4753 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4754 let events = Arc::new(Mutex::new(Vec::new()));
4755 let buffer3 = project
4756 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4757 .await
4758 .unwrap();
4759 buffer3.update(cx, |_, cx| {
4760 cx.subscribe(&buffer3, {
4761 let events = events.clone();
4762 move |_, _, event, _| match event {
4763 BufferEvent::Operation { .. } => {}
4764 _ => events.lock().push(event.clone()),
4765 }
4766 })
4767 .detach();
4768 });
4769
4770 buffer3.update(cx, |buffer, cx| {
4771 buffer.edit([(0..0, "x")], None, cx);
4772 });
4773 events.lock().clear();
4774 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4775 .await
4776 .unwrap();
4777 cx.executor().run_until_parked();
4778 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4779 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4780}
4781
/// Verifies how an open buffer responds to its file changing on disk:
/// a clean buffer is reloaded in place via a diff-based edit (so existing
/// anchors track to their new offsets), while a dirty buffer keeps its
/// in-memory contents and is merely flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers denote offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at each marked offset so we can verify they survive
    // the on-disk reload below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");

    fs.save(
        path!("/dir/the-file").as_ref(),
        &Rope::from_str(new_contents.as_str(), cx.background_executor()),
        LineEnding::Unix,
        Default::default(),
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors created before the reload now resolve to the
        // corresponding marked offsets in the new contents.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()),
        LineEnding::Unix,
        Default::default(),
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4867
/// Verifies line-ending handling: buffers normalize CRLF to LF in memory
/// while remembering the file's original line ending, track line-ending
/// changes made on disk, and write the remembered line ending back out
/// when the buffer is saved.
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // In-memory text is always LF-normalized; the detected line ending is
    // tracked separately on the buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &Rope::from_str("aaa\nb\nc\n", cx.background_executor()),
        LineEnding::Windows,
        Default::default(),
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
4930
/// Verifies diagnostic grouping: LSP diagnostics connected through
/// `related_information` (a primary error plus its hint diagnostics) are
/// assigned the same `group_id`, with exactly one entry per group marked
/// `is_primary`. Also checks that `diagnostic_group` returns each group's
/// entries sorted by position.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two logical groups:
    // - "error 1" (warning) + one hint, cross-linked via related_information
    // - "error 2" (error) + two hints, likewise cross-linked
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in buffer order: "error 2" and its hints share
    // group 0, "error 1" and its hint share group 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" and both of its hints, ordered by position.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5190
/// Verifies the LSP file-rename protocol handshake: when a worktree entry
/// is renamed, the project sends `workspace/willRenameFiles` (a request
/// whose returned `WorkspaceEdit` the project must apply) followed by a
/// `workspace/didRenameFiles` notification, both carrying the old and new
/// URIs matching the server's registered file-operation filters.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in *.rs files and all folders, so the
    // rename below must trigger both will/did notifications.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the language server starts.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Start the rename; it won't complete until the willRenameFiles request
    // handler below responds.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // Edit the server will return from willRenameFiles; the project is
    // expected to resolve and apply it before finishing the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5326
/// Verifies symbol rename via LSP: `prepare_rename` surfaces the server's
/// reported range, and `perform_rename` applies the server's multi-file
/// `WorkspaceEdit`, returning a transaction covering every edited buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE"); the fake server reports
    // the renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the server responds with edits spanning both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction covers both edited buffers: the one we
    // renamed in, and the second file that referenced the symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5466
/// Verifies project-wide text search: matches are found across files on
/// disk, and subsequent searches reflect unsaved in-memory buffer edits
/// rather than the stale on-disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Whole-word, case-insensitive search for "TWO" over the on-disk files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now mentions TWO.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also reports the unsaved buffer's matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5543
/// Verifies search inclusion filters: only files matching at least one
/// inclusion glob are searched; globs matching nothing contribute no
/// results but do not invalidate the other globs.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5667
/// Verifies that the `files_to_exclude` matcher of `SearchQuery::text`
/// removes matching paths from project search results, and that
/// non-matching exclusion globs have no effect.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Needle that occurs once in every fixture file below.
    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob that matches nothing: all four files are searched.
    // The ranges are byte offsets of "file" within each fixture line.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding one extension leaves only the other.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A mix of matching and non-matching exclusion globs behaves like the
    // matching glob alone.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension leaves nothing to search.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5791
5792#[gpui::test]
5793async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5794 init_test(cx);
5795
5796 let search_query = "file";
5797
5798 let fs = FakeFs::new(cx.executor());
5799 fs.insert_tree(
5800 path!("/dir"),
5801 json!({
5802 "one.rs": r#"// Rust file one"#,
5803 "one.ts": r#"// TypeScript file one"#,
5804 "two.rs": r#"// Rust file two"#,
5805 "two.ts": r#"// TypeScript file two"#,
5806 }),
5807 )
5808 .await;
5809
5810 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5811 let path_style = PathStyle::local();
5812 let _buffer = project.update(cx, |project, cx| {
5813 project.create_local_buffer("file", None, false, cx)
5814 });
5815
5816 assert_eq!(
5817 search(
5818 &project,
5819 SearchQuery::text(
5820 search_query,
5821 false,
5822 true,
5823 false,
5824 Default::default(),
5825 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5826 false,
5827 None,
5828 )
5829 .unwrap(),
5830 cx
5831 )
5832 .await
5833 .unwrap(),
5834 HashMap::from_iter([
5835 (path!("dir/one.rs").to_string(), vec![8..12]),
5836 (path!("dir/one.ts").to_string(), vec![14..18]),
5837 (path!("dir/two.rs").to_string(), vec![8..12]),
5838 (path!("dir/two.ts").to_string(), vec![14..18]),
5839 ]),
5840 "If no exclusions match, all files should be returned"
5841 );
5842
5843 assert_eq!(
5844 search(
5845 &project,
5846 SearchQuery::text(
5847 search_query,
5848 false,
5849 true,
5850 false,
5851 Default::default(),
5852 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5853 false,
5854 None,
5855 )
5856 .unwrap(),
5857 cx
5858 )
5859 .await
5860 .unwrap(),
5861 HashMap::from_iter([
5862 (path!("dir/one.ts").to_string(), vec![14..18]),
5863 (path!("dir/two.ts").to_string(), vec![14..18]),
5864 ]),
5865 "Rust exclusion search should give only TypeScript files"
5866 );
5867
5868 assert_eq!(
5869 search(
5870 &project,
5871 SearchQuery::text(
5872 search_query,
5873 false,
5874 true,
5875 false,
5876 Default::default(),
5877 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5878 false,
5879 None,
5880 )
5881 .unwrap(),
5882 cx
5883 )
5884 .await
5885 .unwrap(),
5886 HashMap::from_iter([
5887 (path!("dir/one.rs").to_string(), vec![8..12]),
5888 (path!("dir/two.rs").to_string(), vec![8..12]),
5889 ]),
5890 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5891 );
5892
5893 assert!(
5894 search(
5895 &project,
5896 SearchQuery::text(
5897 search_query,
5898 false,
5899 true,
5900 false,
5901 Default::default(),
5902 PathMatcher::new(
5903 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5904 PathStyle::local(),
5905 )
5906 .unwrap(),
5907 false,
5908 None,
5909 )
5910 .unwrap(),
5911 cx
5912 )
5913 .await
5914 .unwrap()
5915 .is_empty(),
5916 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5917 );
5918}
5919
5920#[gpui::test]
5921async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5922 init_test(cx);
5923
5924 let search_query = "file";
5925
5926 let fs = FakeFs::new(cx.executor());
5927 fs.insert_tree(
5928 path!("/dir"),
5929 json!({
5930 "one.rs": r#"// Rust file one"#,
5931 "one.ts": r#"// TypeScript file one"#,
5932 "two.rs": r#"// Rust file two"#,
5933 "two.ts": r#"// TypeScript file two"#,
5934 }),
5935 )
5936 .await;
5937 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5938 assert!(
5939 search(
5940 &project,
5941 SearchQuery::text(
5942 search_query,
5943 false,
5944 true,
5945 false,
5946 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5947 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5948 false,
5949 None,
5950 )
5951 .unwrap(),
5952 cx
5953 )
5954 .await
5955 .unwrap()
5956 .is_empty(),
5957 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5958 );
5959
5960 assert!(
5961 search(
5962 &project,
5963 SearchQuery::text(
5964 search_query,
5965 false,
5966 true,
5967 false,
5968 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5969 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5970 false,
5971 None,
5972 )
5973 .unwrap(),
5974 cx
5975 )
5976 .await
5977 .unwrap()
5978 .is_empty(),
5979 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5980 );
5981
5982 assert!(
5983 search(
5984 &project,
5985 SearchQuery::text(
5986 search_query,
5987 false,
5988 true,
5989 false,
5990 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5991 .unwrap(),
5992 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5993 .unwrap(),
5994 false,
5995 None,
5996 )
5997 .unwrap(),
5998 cx
5999 )
6000 .await
6001 .unwrap()
6002 .is_empty(),
6003 "Non-matching inclusions and exclusions should not change that."
6004 );
6005
6006 assert_eq!(
6007 search(
6008 &project,
6009 SearchQuery::text(
6010 search_query,
6011 false,
6012 true,
6013 false,
6014 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6015 .unwrap(),
6016 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6017 .unwrap(),
6018 false,
6019 None,
6020 )
6021 .unwrap(),
6022 cx
6023 )
6024 .await
6025 .unwrap(),
6026 HashMap::from_iter([
6027 (path!("dir/one.ts").to_string(), vec![14..18]),
6028 (path!("dir/two.ts").to_string(), vec![14..18]),
6029 ]),
6030 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6031 );
6032}
6033
/// Verifies inclusion matchers in a project with two worktrees: globs
/// prefixed with a worktree name select a single worktree, while plain
/// extension globs apply across all worktrees.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Identical fixture content in both worktrees so the only difference
    // between results is which worktree/extension the matcher admits.
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): the `true` before the trailing `None` appears to make
    // matchers apply to worktree-qualified (full) paths, which the
    // "worktree-a/*.rs" glob relies on — confirm against SearchQuery::text.
    // Ranges are byte offsets of "NEEDLE" within the fixture line.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A bare extension glob (no worktree prefix) matches in every worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6132
/// Verifies the `include_ignored` flag of `SearchQuery::text`: by default
/// gitignored directories are skipped; with the flag set, ignored files are
/// searched too, and inclusion/exclusion matchers still apply to them.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // `target` and `node_modules` are ignored via the fixture .gitignore;
    // only the top-level package.json is tracked content with the needle.
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // include_ignored == false (4th positional arg): ignored dirs skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is constructed for each variation, presumably to
    // avoid state carried over from the previous scan — TODO confirm.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // include_ignored == true: ignored files are searched as well.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // include_ignored combined with inclusion and exclusion matchers:
    // only the included ignored directory is searched, minus excluded *.ts.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6257
6258#[gpui::test]
6259async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6260 init_test(cx);
6261
6262 let fs = FakeFs::new(cx.executor());
6263 fs.insert_tree(
6264 path!("/dir"),
6265 json!({
6266 "one.rs": "// ПРИВЕТ? привет!",
6267 "two.rs": "// ПРИВЕТ.",
6268 "three.rs": "// привет",
6269 }),
6270 )
6271 .await;
6272 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6273 let unicode_case_sensitive_query = SearchQuery::text(
6274 "привет",
6275 false,
6276 true,
6277 false,
6278 Default::default(),
6279 Default::default(),
6280 false,
6281 None,
6282 );
6283 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6284 assert_eq!(
6285 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6286 .await
6287 .unwrap(),
6288 HashMap::from_iter([
6289 (path!("dir/one.rs").to_string(), vec![17..29]),
6290 (path!("dir/three.rs").to_string(), vec![3..15]),
6291 ])
6292 );
6293
6294 let unicode_case_insensitive_query = SearchQuery::text(
6295 "привет",
6296 false,
6297 false,
6298 false,
6299 Default::default(),
6300 Default::default(),
6301 false,
6302 None,
6303 );
6304 assert_matches!(
6305 unicode_case_insensitive_query,
6306 Ok(SearchQuery::Regex { .. })
6307 );
6308 assert_eq!(
6309 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6310 .await
6311 .unwrap(),
6312 HashMap::from_iter([
6313 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6314 (path!("dir/two.rs").to_string(), vec![3..15]),
6315 (path!("dir/three.rs").to_string(), vec![3..15]),
6316 ])
6317 );
6318
6319 assert_eq!(
6320 search(
6321 &project,
6322 SearchQuery::text(
6323 "привет.",
6324 false,
6325 false,
6326 false,
6327 Default::default(),
6328 Default::default(),
6329 false,
6330 None,
6331 )
6332 .unwrap(),
6333 cx
6334 )
6335 .await
6336 .unwrap(),
6337 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6338 );
6339}
6340
6341#[gpui::test]
6342async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6343 init_test(cx);
6344
6345 let fs = FakeFs::new(cx.executor());
6346 fs.insert_tree(
6347 "/one/two",
6348 json!({
6349 "three": {
6350 "a.txt": "",
6351 "four": {}
6352 },
6353 "c.rs": ""
6354 }),
6355 )
6356 .await;
6357
6358 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6359 project
6360 .update(cx, |project, cx| {
6361 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6362 project.create_entry((id, rel_path("b..")), true, cx)
6363 })
6364 .await
6365 .unwrap()
6366 .into_included()
6367 .unwrap();
6368
6369 assert_eq!(
6370 fs.paths(true),
6371 vec![
6372 PathBuf::from(path!("/")),
6373 PathBuf::from(path!("/one")),
6374 PathBuf::from(path!("/one/two")),
6375 PathBuf::from(path!("/one/two/c.rs")),
6376 PathBuf::from(path!("/one/two/three")),
6377 PathBuf::from(path!("/one/two/three/a.txt")),
6378 PathBuf::from(path!("/one/two/three/b..")),
6379 PathBuf::from(path!("/one/two/three/four")),
6380 ]
6381 );
6382}
6383
6384#[gpui::test]
6385async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6386 init_test(cx);
6387
6388 let fs = FakeFs::new(cx.executor());
6389 fs.insert_tree(
6390 path!("/dir"),
6391 json!({
6392 "a.tsx": "a",
6393 }),
6394 )
6395 .await;
6396
6397 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6398
6399 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6400 language_registry.add(tsx_lang());
6401 let language_server_names = [
6402 "TypeScriptServer",
6403 "TailwindServer",
6404 "ESLintServer",
6405 "NoHoverCapabilitiesServer",
6406 ];
6407 let mut language_servers = [
6408 language_registry.register_fake_lsp(
6409 "tsx",
6410 FakeLspAdapter {
6411 name: language_server_names[0],
6412 capabilities: lsp::ServerCapabilities {
6413 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6414 ..lsp::ServerCapabilities::default()
6415 },
6416 ..FakeLspAdapter::default()
6417 },
6418 ),
6419 language_registry.register_fake_lsp(
6420 "tsx",
6421 FakeLspAdapter {
6422 name: language_server_names[1],
6423 capabilities: lsp::ServerCapabilities {
6424 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6425 ..lsp::ServerCapabilities::default()
6426 },
6427 ..FakeLspAdapter::default()
6428 },
6429 ),
6430 language_registry.register_fake_lsp(
6431 "tsx",
6432 FakeLspAdapter {
6433 name: language_server_names[2],
6434 capabilities: lsp::ServerCapabilities {
6435 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6436 ..lsp::ServerCapabilities::default()
6437 },
6438 ..FakeLspAdapter::default()
6439 },
6440 ),
6441 language_registry.register_fake_lsp(
6442 "tsx",
6443 FakeLspAdapter {
6444 name: language_server_names[3],
6445 capabilities: lsp::ServerCapabilities {
6446 hover_provider: None,
6447 ..lsp::ServerCapabilities::default()
6448 },
6449 ..FakeLspAdapter::default()
6450 },
6451 ),
6452 ];
6453
6454 let (buffer, _handle) = project
6455 .update(cx, |p, cx| {
6456 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6457 })
6458 .await
6459 .unwrap();
6460 cx.executor().run_until_parked();
6461
6462 let mut servers_with_hover_requests = HashMap::default();
6463 for i in 0..language_server_names.len() {
6464 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6465 panic!(
6466 "Failed to get language server #{i} with name {}",
6467 &language_server_names[i]
6468 )
6469 });
6470 let new_server_name = new_server.server.name();
6471 assert!(
6472 !servers_with_hover_requests.contains_key(&new_server_name),
6473 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6474 );
6475 match new_server_name.as_ref() {
6476 "TailwindServer" | "TypeScriptServer" => {
6477 servers_with_hover_requests.insert(
6478 new_server_name.clone(),
6479 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6480 move |_, _| {
6481 let name = new_server_name.clone();
6482 async move {
6483 Ok(Some(lsp::Hover {
6484 contents: lsp::HoverContents::Scalar(
6485 lsp::MarkedString::String(format!("{name} hover")),
6486 ),
6487 range: None,
6488 }))
6489 }
6490 },
6491 ),
6492 );
6493 }
6494 "ESLintServer" => {
6495 servers_with_hover_requests.insert(
6496 new_server_name,
6497 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6498 |_, _| async move { Ok(None) },
6499 ),
6500 );
6501 }
6502 "NoHoverCapabilitiesServer" => {
6503 let _never_handled = new_server
6504 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6505 panic!(
6506 "Should not call for hovers server with no corresponding capabilities"
6507 )
6508 });
6509 }
6510 unexpected => panic!("Unexpected server name: {unexpected}"),
6511 }
6512 }
6513
6514 let hover_task = project.update(cx, |project, cx| {
6515 project.hover(&buffer, Point::new(0, 0), cx)
6516 });
6517 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6518 |mut hover_request| async move {
6519 hover_request
6520 .next()
6521 .await
6522 .expect("All hover requests should have been triggered")
6523 },
6524 ))
6525 .await;
6526 assert_eq!(
6527 vec!["TailwindServer hover", "TypeScriptServer hover"],
6528 hover_task
6529 .await
6530 .into_iter()
6531 .flatten()
6532 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6533 .sorted()
6534 .collect::<Vec<_>>(),
6535 "Should receive hover responses from all related servers with hover capabilities"
6536 );
6537}
6538
/// Verifies that hover content consisting solely of empty or
/// whitespace-only parts is discarded rather than surfaced as blank hovers.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Single fake server that advertises hover support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The hover response contains only empty / whitespace-only strings.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until the fake server has actually served the hover request
    // before inspecting the aggregated result.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6612
/// Verifies that requesting code actions with an explicit `kinds` filter
/// returns only actions of the requested kind, even when the server offers
/// actions of other kinds.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Single fake server that advertises code-action support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one kind is
    // requested below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Wait until the fake server has actually served the request.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kind filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6691
6692#[gpui::test]
6693async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6694 init_test(cx);
6695
6696 let fs = FakeFs::new(cx.executor());
6697 fs.insert_tree(
6698 path!("/dir"),
6699 json!({
6700 "a.tsx": "a",
6701 }),
6702 )
6703 .await;
6704
6705 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6706
6707 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6708 language_registry.add(tsx_lang());
6709 let language_server_names = [
6710 "TypeScriptServer",
6711 "TailwindServer",
6712 "ESLintServer",
6713 "NoActionsCapabilitiesServer",
6714 ];
6715
6716 let mut language_server_rxs = [
6717 language_registry.register_fake_lsp(
6718 "tsx",
6719 FakeLspAdapter {
6720 name: language_server_names[0],
6721 capabilities: lsp::ServerCapabilities {
6722 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6723 ..lsp::ServerCapabilities::default()
6724 },
6725 ..FakeLspAdapter::default()
6726 },
6727 ),
6728 language_registry.register_fake_lsp(
6729 "tsx",
6730 FakeLspAdapter {
6731 name: language_server_names[1],
6732 capabilities: lsp::ServerCapabilities {
6733 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6734 ..lsp::ServerCapabilities::default()
6735 },
6736 ..FakeLspAdapter::default()
6737 },
6738 ),
6739 language_registry.register_fake_lsp(
6740 "tsx",
6741 FakeLspAdapter {
6742 name: language_server_names[2],
6743 capabilities: lsp::ServerCapabilities {
6744 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6745 ..lsp::ServerCapabilities::default()
6746 },
6747 ..FakeLspAdapter::default()
6748 },
6749 ),
6750 language_registry.register_fake_lsp(
6751 "tsx",
6752 FakeLspAdapter {
6753 name: language_server_names[3],
6754 capabilities: lsp::ServerCapabilities {
6755 code_action_provider: None,
6756 ..lsp::ServerCapabilities::default()
6757 },
6758 ..FakeLspAdapter::default()
6759 },
6760 ),
6761 ];
6762
6763 let (buffer, _handle) = project
6764 .update(cx, |p, cx| {
6765 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6766 })
6767 .await
6768 .unwrap();
6769 cx.executor().run_until_parked();
6770
6771 let mut servers_with_actions_requests = HashMap::default();
6772 for i in 0..language_server_names.len() {
6773 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6774 panic!(
6775 "Failed to get language server #{i} with name {}",
6776 &language_server_names[i]
6777 )
6778 });
6779 let new_server_name = new_server.server.name();
6780
6781 assert!(
6782 !servers_with_actions_requests.contains_key(&new_server_name),
6783 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6784 );
6785 match new_server_name.0.as_ref() {
6786 "TailwindServer" | "TypeScriptServer" => {
6787 servers_with_actions_requests.insert(
6788 new_server_name.clone(),
6789 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6790 move |_, _| {
6791 let name = new_server_name.clone();
6792 async move {
6793 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6794 lsp::CodeAction {
6795 title: format!("{name} code action"),
6796 ..lsp::CodeAction::default()
6797 },
6798 )]))
6799 }
6800 },
6801 ),
6802 );
6803 }
6804 "ESLintServer" => {
6805 servers_with_actions_requests.insert(
6806 new_server_name,
6807 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6808 |_, _| async move { Ok(None) },
6809 ),
6810 );
6811 }
6812 "NoActionsCapabilitiesServer" => {
6813 let _never_handled = new_server
6814 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6815 panic!(
6816 "Should not call for code actions server with no corresponding capabilities"
6817 )
6818 });
6819 }
6820 unexpected => panic!("Unexpected server name: {unexpected}"),
6821 }
6822 }
6823
6824 let code_actions_task = project.update(cx, |project, cx| {
6825 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6826 });
6827
6828 // cx.run_until_parked();
6829 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6830 |mut code_actions_request| async move {
6831 code_actions_request
6832 .next()
6833 .await
6834 .expect("All code actions requests should have been triggered")
6835 },
6836 ))
6837 .await;
6838 assert_eq!(
6839 vec!["TailwindServer code action", "TypeScriptServer code action"],
6840 code_actions_task
6841 .await
6842 .unwrap()
6843 .unwrap()
6844 .into_iter()
6845 .map(|code_action| code_action.lsp_action.title().to_owned())
6846 .sorted()
6847 .collect::<Vec<_>>(),
6848 "Should receive code actions responses from all related servers with hover capabilities"
6849 );
6850}
6851
6852#[gpui::test]
6853async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6854 init_test(cx);
6855
6856 let fs = FakeFs::new(cx.executor());
6857 fs.insert_tree(
6858 "/dir",
6859 json!({
6860 "a.rs": "let a = 1;",
6861 "b.rs": "let b = 2;",
6862 "c.rs": "let c = 2;",
6863 }),
6864 )
6865 .await;
6866
6867 let project = Project::test(
6868 fs,
6869 [
6870 "/dir/a.rs".as_ref(),
6871 "/dir/b.rs".as_ref(),
6872 "/dir/c.rs".as_ref(),
6873 ],
6874 cx,
6875 )
6876 .await;
6877
6878 // check the initial state and get the worktrees
6879 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6880 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6881 assert_eq!(worktrees.len(), 3);
6882
6883 let worktree_a = worktrees[0].read(cx);
6884 let worktree_b = worktrees[1].read(cx);
6885 let worktree_c = worktrees[2].read(cx);
6886
6887 // check they start in the right order
6888 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6889 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6890 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6891
6892 (
6893 worktrees[0].clone(),
6894 worktrees[1].clone(),
6895 worktrees[2].clone(),
6896 )
6897 });
6898
6899 // move first worktree to after the second
6900 // [a, b, c] -> [b, a, c]
6901 project
6902 .update(cx, |project, cx| {
6903 let first = worktree_a.read(cx);
6904 let second = worktree_b.read(cx);
6905 project.move_worktree(first.id(), second.id(), cx)
6906 })
6907 .expect("moving first after second");
6908
6909 // check the state after moving
6910 project.update(cx, |project, cx| {
6911 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6912 assert_eq!(worktrees.len(), 3);
6913
6914 let first = worktrees[0].read(cx);
6915 let second = worktrees[1].read(cx);
6916 let third = worktrees[2].read(cx);
6917
6918 // check they are now in the right order
6919 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6920 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6921 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6922 });
6923
6924 // move the second worktree to before the first
6925 // [b, a, c] -> [a, b, c]
6926 project
6927 .update(cx, |project, cx| {
6928 let second = worktree_a.read(cx);
6929 let first = worktree_b.read(cx);
6930 project.move_worktree(first.id(), second.id(), cx)
6931 })
6932 .expect("moving second before first");
6933
6934 // check the state after moving
6935 project.update(cx, |project, cx| {
6936 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6937 assert_eq!(worktrees.len(), 3);
6938
6939 let first = worktrees[0].read(cx);
6940 let second = worktrees[1].read(cx);
6941 let third = worktrees[2].read(cx);
6942
6943 // check they are now in the right order
6944 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6945 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6946 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6947 });
6948
6949 // move the second worktree to after the third
6950 // [a, b, c] -> [a, c, b]
6951 project
6952 .update(cx, |project, cx| {
6953 let second = worktree_b.read(cx);
6954 let third = worktree_c.read(cx);
6955 project.move_worktree(second.id(), third.id(), cx)
6956 })
6957 .expect("moving second after third");
6958
6959 // check the state after moving
6960 project.update(cx, |project, cx| {
6961 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6962 assert_eq!(worktrees.len(), 3);
6963
6964 let first = worktrees[0].read(cx);
6965 let second = worktrees[1].read(cx);
6966 let third = worktrees[2].read(cx);
6967
6968 // check they are now in the right order
6969 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6970 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6971 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6972 });
6973
6974 // move the third worktree to before the second
6975 // [a, c, b] -> [a, b, c]
6976 project
6977 .update(cx, |project, cx| {
6978 let third = worktree_c.read(cx);
6979 let second = worktree_b.read(cx);
6980 project.move_worktree(third.id(), second.id(), cx)
6981 })
6982 .expect("moving third before second");
6983
6984 // check the state after moving
6985 project.update(cx, |project, cx| {
6986 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6987 assert_eq!(worktrees.len(), 3);
6988
6989 let first = worktrees[0].read(cx);
6990 let second = worktrees[1].read(cx);
6991 let third = worktrees[2].read(cx);
6992
6993 // check they are now in the right order
6994 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6995 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6996 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6997 });
6998
6999 // move the first worktree to after the third
7000 // [a, b, c] -> [b, c, a]
7001 project
7002 .update(cx, |project, cx| {
7003 let first = worktree_a.read(cx);
7004 let third = worktree_c.read(cx);
7005 project.move_worktree(first.id(), third.id(), cx)
7006 })
7007 .expect("moving first after third");
7008
7009 // check the state after moving
7010 project.update(cx, |project, cx| {
7011 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7012 assert_eq!(worktrees.len(), 3);
7013
7014 let first = worktrees[0].read(cx);
7015 let second = worktrees[1].read(cx);
7016 let third = worktrees[2].read(cx);
7017
7018 // check they are now in the right order
7019 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7020 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7021 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7022 });
7023
7024 // move the third worktree to before the first
7025 // [b, c, a] -> [a, b, c]
7026 project
7027 .update(cx, |project, cx| {
7028 let third = worktree_a.read(cx);
7029 let first = worktree_b.read(cx);
7030 project.move_worktree(third.id(), first.id(), cx)
7031 })
7032 .expect("moving third before first");
7033
7034 // check the state after moving
7035 project.update(cx, |project, cx| {
7036 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7037 assert_eq!(worktrees.len(), 3);
7038
7039 let first = worktrees[0].read(cx);
7040 let second = worktrees[1].read(cx);
7041 let third = worktrees[2].read(cx);
7042
7043 // check they are now in the right order
7044 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7045 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7046 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7047 });
7048}
7049
7050#[gpui::test]
7051async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7052 init_test(cx);
7053
7054 let staged_contents = r#"
7055 fn main() {
7056 println!("hello world");
7057 }
7058 "#
7059 .unindent();
7060 let file_contents = r#"
7061 // print goodbye
7062 fn main() {
7063 println!("goodbye world");
7064 }
7065 "#
7066 .unindent();
7067
7068 let fs = FakeFs::new(cx.background_executor.clone());
7069 fs.insert_tree(
7070 "/dir",
7071 json!({
7072 ".git": {},
7073 "src": {
7074 "main.rs": file_contents,
7075 }
7076 }),
7077 )
7078 .await;
7079
7080 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7081
7082 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7083
7084 let buffer = project
7085 .update(cx, |project, cx| {
7086 project.open_local_buffer("/dir/src/main.rs", cx)
7087 })
7088 .await
7089 .unwrap();
7090 let unstaged_diff = project
7091 .update(cx, |project, cx| {
7092 project.open_unstaged_diff(buffer.clone(), cx)
7093 })
7094 .await
7095 .unwrap();
7096
7097 cx.run_until_parked();
7098 unstaged_diff.update(cx, |unstaged_diff, cx| {
7099 let snapshot = buffer.read(cx).snapshot();
7100 assert_hunks(
7101 unstaged_diff.hunks(&snapshot, cx),
7102 &snapshot,
7103 &unstaged_diff.base_text_string().unwrap(),
7104 &[
7105 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7106 (
7107 2..3,
7108 " println!(\"hello world\");\n",
7109 " println!(\"goodbye world\");\n",
7110 DiffHunkStatus::modified_none(),
7111 ),
7112 ],
7113 );
7114 });
7115
7116 let staged_contents = r#"
7117 // print goodbye
7118 fn main() {
7119 }
7120 "#
7121 .unindent();
7122
7123 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7124
7125 cx.run_until_parked();
7126 unstaged_diff.update(cx, |unstaged_diff, cx| {
7127 let snapshot = buffer.read(cx).snapshot();
7128 assert_hunks(
7129 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7130 &snapshot,
7131 &unstaged_diff.base_text().text(),
7132 &[(
7133 2..3,
7134 "",
7135 " println!(\"goodbye world\");\n",
7136 DiffHunkStatus::added_none(),
7137 )],
7138 );
7139 });
7140}
7141
// Verifies the uncommitted diff (working copy vs. HEAD): per-hunk secondary
// (staged/unstaged) statuses, reaction to HEAD being reset, and how a file
// that was deleted from the working copy is presented.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Contents of src/modification.rs at HEAD.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Contents in the index: the println change is already staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Contents in the working copy: adds a comment on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // src/deletion.rs exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD content) should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment addition is unstaged (has a secondary hunk), while the
    // println modification is already staged (no secondary hunk).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deletion hunk; the deletion is not yet
    // staged (the file is still present in the index).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is gone from the index, the deletion hunk has no
    // secondary (unstaged) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7321
7322#[gpui::test]
7323async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
7324 use DiffHunkSecondaryStatus::*;
7325 init_test(cx);
7326
7327 let committed_contents = r#"
7328 zero
7329 one
7330 two
7331 three
7332 four
7333 five
7334 "#
7335 .unindent();
7336 let file_contents = r#"
7337 one
7338 TWO
7339 three
7340 FOUR
7341 five
7342 "#
7343 .unindent();
7344
7345 let fs = FakeFs::new(cx.background_executor.clone());
7346 fs.insert_tree(
7347 "/dir",
7348 json!({
7349 ".git": {},
7350 "file.txt": file_contents.clone()
7351 }),
7352 )
7353 .await;
7354
7355 fs.set_head_and_index_for_repo(
7356 path!("/dir/.git").as_ref(),
7357 &[("file.txt", committed_contents.clone())],
7358 );
7359
7360 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7361
7362 let buffer = project
7363 .update(cx, |project, cx| {
7364 project.open_local_buffer("/dir/file.txt", cx)
7365 })
7366 .await
7367 .unwrap();
7368 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7369 let uncommitted_diff = project
7370 .update(cx, |project, cx| {
7371 project.open_uncommitted_diff(buffer.clone(), cx)
7372 })
7373 .await
7374 .unwrap();
7375 let mut diff_events = cx.events(&uncommitted_diff);
7376
7377 // The hunks are initially unstaged.
7378 uncommitted_diff.read_with(cx, |diff, cx| {
7379 assert_hunks(
7380 diff.hunks(&snapshot, cx),
7381 &snapshot,
7382 &diff.base_text_string().unwrap(),
7383 &[
7384 (
7385 0..0,
7386 "zero\n",
7387 "",
7388 DiffHunkStatus::deleted(HasSecondaryHunk),
7389 ),
7390 (
7391 1..2,
7392 "two\n",
7393 "TWO\n",
7394 DiffHunkStatus::modified(HasSecondaryHunk),
7395 ),
7396 (
7397 3..4,
7398 "four\n",
7399 "FOUR\n",
7400 DiffHunkStatus::modified(HasSecondaryHunk),
7401 ),
7402 ],
7403 );
7404 });
7405
7406 // Stage a hunk. It appears as optimistically staged.
7407 uncommitted_diff.update(cx, |diff, cx| {
7408 let range =
7409 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
7410 let hunks = diff
7411 .hunks_intersecting_range(range, &snapshot, cx)
7412 .collect::<Vec<_>>();
7413 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7414
7415 assert_hunks(
7416 diff.hunks(&snapshot, cx),
7417 &snapshot,
7418 &diff.base_text_string().unwrap(),
7419 &[
7420 (
7421 0..0,
7422 "zero\n",
7423 "",
7424 DiffHunkStatus::deleted(HasSecondaryHunk),
7425 ),
7426 (
7427 1..2,
7428 "two\n",
7429 "TWO\n",
7430 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7431 ),
7432 (
7433 3..4,
7434 "four\n",
7435 "FOUR\n",
7436 DiffHunkStatus::modified(HasSecondaryHunk),
7437 ),
7438 ],
7439 );
7440 });
7441
7442 // The diff emits a change event for the range of the staged hunk.
7443 assert!(matches!(
7444 diff_events.next().await.unwrap(),
7445 BufferDiffEvent::HunksStagedOrUnstaged(_)
7446 ));
7447 let event = diff_events.next().await.unwrap();
7448 if let BufferDiffEvent::DiffChanged {
7449 changed_range: Some(changed_range),
7450 } = event
7451 {
7452 let changed_range = changed_range.to_point(&snapshot);
7453 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
7454 } else {
7455 panic!("Unexpected event {event:?}");
7456 }
7457
7458 // When the write to the index completes, it appears as staged.
7459 cx.run_until_parked();
7460 uncommitted_diff.update(cx, |diff, cx| {
7461 assert_hunks(
7462 diff.hunks(&snapshot, cx),
7463 &snapshot,
7464 &diff.base_text_string().unwrap(),
7465 &[
7466 (
7467 0..0,
7468 "zero\n",
7469 "",
7470 DiffHunkStatus::deleted(HasSecondaryHunk),
7471 ),
7472 (
7473 1..2,
7474 "two\n",
7475 "TWO\n",
7476 DiffHunkStatus::modified(NoSecondaryHunk),
7477 ),
7478 (
7479 3..4,
7480 "four\n",
7481 "FOUR\n",
7482 DiffHunkStatus::modified(HasSecondaryHunk),
7483 ),
7484 ],
7485 );
7486 });
7487
7488 // The diff emits a change event for the changed index text.
7489 let event = diff_events.next().await.unwrap();
7490 if let BufferDiffEvent::DiffChanged {
7491 changed_range: Some(changed_range),
7492 } = event
7493 {
7494 let changed_range = changed_range.to_point(&snapshot);
7495 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
7496 } else {
7497 panic!("Unexpected event {event:?}");
7498 }
7499
7500 // Simulate a problem writing to the git index.
7501 fs.set_error_message_for_index_write(
7502 "/dir/.git".as_ref(),
7503 Some("failed to write git index".into()),
7504 );
7505
7506 // Stage another hunk.
7507 uncommitted_diff.update(cx, |diff, cx| {
7508 let range =
7509 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
7510 let hunks = diff
7511 .hunks_intersecting_range(range, &snapshot, cx)
7512 .collect::<Vec<_>>();
7513 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7514
7515 assert_hunks(
7516 diff.hunks(&snapshot, cx),
7517 &snapshot,
7518 &diff.base_text_string().unwrap(),
7519 &[
7520 (
7521 0..0,
7522 "zero\n",
7523 "",
7524 DiffHunkStatus::deleted(HasSecondaryHunk),
7525 ),
7526 (
7527 1..2,
7528 "two\n",
7529 "TWO\n",
7530 DiffHunkStatus::modified(NoSecondaryHunk),
7531 ),
7532 (
7533 3..4,
7534 "four\n",
7535 "FOUR\n",
7536 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7537 ),
7538 ],
7539 );
7540 });
7541 assert!(matches!(
7542 diff_events.next().await.unwrap(),
7543 BufferDiffEvent::HunksStagedOrUnstaged(_)
7544 ));
7545 let event = diff_events.next().await.unwrap();
7546 if let BufferDiffEvent::DiffChanged {
7547 changed_range: Some(changed_range),
7548 } = event
7549 {
7550 let changed_range = changed_range.to_point(&snapshot);
7551 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
7552 } else {
7553 panic!("Unexpected event {event:?}");
7554 }
7555
7556 // When the write fails, the hunk returns to being unstaged.
7557 cx.run_until_parked();
7558 uncommitted_diff.update(cx, |diff, cx| {
7559 assert_hunks(
7560 diff.hunks(&snapshot, cx),
7561 &snapshot,
7562 &diff.base_text_string().unwrap(),
7563 &[
7564 (
7565 0..0,
7566 "zero\n",
7567 "",
7568 DiffHunkStatus::deleted(HasSecondaryHunk),
7569 ),
7570 (
7571 1..2,
7572 "two\n",
7573 "TWO\n",
7574 DiffHunkStatus::modified(NoSecondaryHunk),
7575 ),
7576 (
7577 3..4,
7578 "four\n",
7579 "FOUR\n",
7580 DiffHunkStatus::modified(HasSecondaryHunk),
7581 ),
7582 ],
7583 );
7584 });
7585
7586 let event = diff_events.next().await.unwrap();
7587 if let BufferDiffEvent::DiffChanged {
7588 changed_range: Some(changed_range),
7589 } = event
7590 {
7591 let changed_range = changed_range.to_point(&snapshot);
7592 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
7593 } else {
7594 panic!("Unexpected event {event:?}");
7595 }
7596
7597 // Allow writing to the git index to succeed again.
7598 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
7599
7600 // Stage two hunks with separate operations.
7601 uncommitted_diff.update(cx, |diff, cx| {
7602 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7603 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7604 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7605 });
7606
7607 // Both staged hunks appear as pending.
7608 uncommitted_diff.update(cx, |diff, cx| {
7609 assert_hunks(
7610 diff.hunks(&snapshot, cx),
7611 &snapshot,
7612 &diff.base_text_string().unwrap(),
7613 &[
7614 (
7615 0..0,
7616 "zero\n",
7617 "",
7618 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7619 ),
7620 (
7621 1..2,
7622 "two\n",
7623 "TWO\n",
7624 DiffHunkStatus::modified(NoSecondaryHunk),
7625 ),
7626 (
7627 3..4,
7628 "four\n",
7629 "FOUR\n",
7630 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7631 ),
7632 ],
7633 );
7634 });
7635
7636 // Both staging operations take effect.
7637 cx.run_until_parked();
7638 uncommitted_diff.update(cx, |diff, cx| {
7639 assert_hunks(
7640 diff.hunks(&snapshot, cx),
7641 &snapshot,
7642 &diff.base_text_string().unwrap(),
7643 &[
7644 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7645 (
7646 1..2,
7647 "two\n",
7648 "TWO\n",
7649 DiffHunkStatus::modified(NoSecondaryHunk),
7650 ),
7651 (
7652 3..4,
7653 "four\n",
7654 "FOUR\n",
7655 DiffHunkStatus::modified(NoSecondaryHunk),
7656 ),
7657 ],
7658 );
7659 });
7660}
7661
// Verifies that hunk staging remains consistent when the filesystem events
// confirming an index write are delayed: staging further hunks while earlier
// FS events are still buffered must not lose or revert any staged state.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index start out identical, so every hunk begins unstaged.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. With events paused, it stays optimistically
    // pending rather than transitioning to fully staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7855
// Randomized stress test: repeatedly stages and unstages random hunks with
// random delays, then checks that once everything settles, each hunk's
// secondary status matches the last operation applied to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of the expected state; its
    // `secondary_status` fields are updated alongside each operation.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk: stage if it's unstaged, unstage otherwise, and
        // record the expected optimistic (pending) status in the model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly interleave the operations with executor progress.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After settling, every pending status should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    // The real diff must agree with the model, hunk by hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7978
7979#[gpui::test]
7980async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7981 init_test(cx);
7982
7983 let committed_contents = r#"
7984 fn main() {
7985 println!("hello from HEAD");
7986 }
7987 "#
7988 .unindent();
7989 let file_contents = r#"
7990 fn main() {
7991 println!("hello from the working copy");
7992 }
7993 "#
7994 .unindent();
7995
7996 let fs = FakeFs::new(cx.background_executor.clone());
7997 fs.insert_tree(
7998 "/dir",
7999 json!({
8000 ".git": {},
8001 "src": {
8002 "main.rs": file_contents,
8003 }
8004 }),
8005 )
8006 .await;
8007
8008 fs.set_head_for_repo(
8009 Path::new("/dir/.git"),
8010 &[("src/main.rs", committed_contents.clone())],
8011 "deadbeef",
8012 );
8013 fs.set_index_for_repo(
8014 Path::new("/dir/.git"),
8015 &[("src/main.rs", committed_contents.clone())],
8016 );
8017
8018 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8019
8020 let buffer = project
8021 .update(cx, |project, cx| {
8022 project.open_local_buffer("/dir/src/main.rs", cx)
8023 })
8024 .await
8025 .unwrap();
8026 let uncommitted_diff = project
8027 .update(cx, |project, cx| {
8028 project.open_uncommitted_diff(buffer.clone(), cx)
8029 })
8030 .await
8031 .unwrap();
8032
8033 cx.run_until_parked();
8034 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8035 let snapshot = buffer.read(cx).snapshot();
8036 assert_hunks(
8037 uncommitted_diff.hunks(&snapshot, cx),
8038 &snapshot,
8039 &uncommitted_diff.base_text_string().unwrap(),
8040 &[(
8041 1..2,
8042 " println!(\"hello from HEAD\");\n",
8043 " println!(\"hello from the working copy\");\n",
8044 DiffHunkStatus {
8045 kind: DiffHunkStatusKind::Modified,
8046 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8047 },
8048 )],
8049 );
8050 });
8051}
8052
// Verifies `GitStore::repository_and_path_for_project_path`: paths map to the
// innermost enclosing repository (including nested repos under `deps/`),
// paths outside any repo map to `None`, and the mapping updates when a `.git`
// directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Two nested repositories: /root/dir1 and /root/dir1/deps/dep1;
    // /root/c.txt lies outside both.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for git repository discovery to finish before querying.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing dir1's .git directory should drop its repository mapping.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8142
8143#[gpui::test]
8144async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8145 init_test(cx);
8146 let fs = FakeFs::new(cx.background_executor.clone());
8147 let home = paths::home_dir();
8148 fs.insert_tree(
8149 home,
8150 json!({
8151 ".git": {},
8152 "project": {
8153 "a.txt": "A"
8154 },
8155 }),
8156 )
8157 .await;
8158
8159 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8160 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8161 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8162
8163 project
8164 .update(cx, |project, cx| project.git_scans_complete(cx))
8165 .await;
8166 tree.flush_fs_events(cx).await;
8167
8168 project.read_with(cx, |project, cx| {
8169 let containing = project
8170 .git_store()
8171 .read(cx)
8172 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8173 assert!(containing.is_none());
8174 });
8175
8176 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8177 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8178 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8179 project
8180 .update(cx, |project, cx| project.git_scans_complete(cx))
8181 .await;
8182 tree.flush_fs_events(cx).await;
8183
8184 project.read_with(cx, |project, cx| {
8185 let containing = project
8186 .git_store()
8187 .read(cx)
8188 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8189 assert_eq!(
8190 containing
8191 .unwrap()
8192 .0
8193 .read(cx)
8194 .work_directory_abs_path
8195 .as_ref(),
8196 home,
8197 );
8198 });
8199}
8200
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check against a real on-disk git repository that the
    // project's cached statuses track edits, deletions, and commits.
    init_test(cx);
    // Real FS + real git below, so background threads must be allowed to park.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // b.txt is intentionally left untracked; now make a.txt modified and
    // d.txt deleted relative to the initial commit.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously-unchanged file and confirm the status change is
    // picked up incrementally.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the pending changes so their statuses clear...
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // ...then delete one tracked file and one untracked file from the
    // working copy.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8330
8331#[gpui::test]
8332async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
8333 init_test(cx);
8334 cx.executor().allow_parking();
8335
8336 let root = TempTree::new(json!({
8337 "project": {
8338 "sub": {},
8339 "a.txt": "",
8340 },
8341 }));
8342
8343 let work_dir = root.path().join("project");
8344 let repo = git_init(work_dir.as_path());
8345 // a.txt exists in HEAD and the working copy but is deleted in the index.
8346 git_add("a.txt", &repo);
8347 git_commit("Initial commit", &repo);
8348 git_remove_index("a.txt".as_ref(), &repo);
8349 // `sub` is a nested git repository.
8350 let _sub = git_init(&work_dir.join("sub"));
8351
8352 let project = Project::test(
8353 Arc::new(RealFs::new(None, cx.executor())),
8354 [root.path()],
8355 cx,
8356 )
8357 .await;
8358
8359 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8360 tree.flush_fs_events(cx).await;
8361 project
8362 .update(cx, |project, cx| project.git_scans_complete(cx))
8363 .await;
8364 cx.executor().run_until_parked();
8365
8366 let repository = project.read_with(cx, |project, cx| {
8367 project
8368 .repositories(cx)
8369 .values()
8370 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
8371 .unwrap()
8372 .clone()
8373 });
8374
8375 repository.read_with(cx, |repository, _cx| {
8376 let entries = repository.cached_status().collect::<Vec<_>>();
8377
8378 // `sub` doesn't appear in our computed statuses.
8379 // a.txt appears with a combined `DA` status.
8380 assert_eq!(
8381 entries,
8382 [StatusEntry {
8383 repo_path: repo_path("a.txt"),
8384 status: TrackedStatus {
8385 index_status: StatusCode::Deleted,
8386 worktree_status: StatusCode::Added
8387 }
8388 .into(),
8389 }]
8390 )
8391 });
8392}
8393
8394#[gpui::test]
8395async fn test_repository_subfolder_git_status(
8396 executor: gpui::BackgroundExecutor,
8397 cx: &mut gpui::TestAppContext,
8398) {
8399 init_test(cx);
8400
8401 let fs = FakeFs::new(executor);
8402 fs.insert_tree(
8403 path!("/root"),
8404 json!({
8405 "my-repo": {
8406 ".git": {},
8407 "a.txt": "a",
8408 "sub-folder-1": {
8409 "sub-folder-2": {
8410 "c.txt": "cc",
8411 "d": {
8412 "e.txt": "eee"
8413 }
8414 },
8415 }
8416 },
8417 }),
8418 )
8419 .await;
8420
8421 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8422 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8423
8424 fs.set_status_for_repo(
8425 path!("/root/my-repo/.git").as_ref(),
8426 &[(E_TXT, FileStatus::Untracked)],
8427 );
8428
8429 let project = Project::test(
8430 fs.clone(),
8431 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8432 cx,
8433 )
8434 .await;
8435
8436 project
8437 .update(cx, |project, cx| project.git_scans_complete(cx))
8438 .await;
8439 cx.run_until_parked();
8440
8441 let repository = project.read_with(cx, |project, cx| {
8442 project.repositories(cx).values().next().unwrap().clone()
8443 });
8444
8445 // Ensure that the git status is loaded correctly
8446 repository.read_with(cx, |repository, _cx| {
8447 assert_eq!(
8448 repository.work_directory_abs_path,
8449 Path::new(path!("/root/my-repo")).into()
8450 );
8451
8452 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8453 assert_eq!(
8454 repository
8455 .status_for_path(&repo_path(E_TXT))
8456 .unwrap()
8457 .status,
8458 FileStatus::Untracked
8459 );
8460 });
8461
8462 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8463 project
8464 .update(cx, |project, cx| project.git_scans_complete(cx))
8465 .await;
8466 cx.run_until_parked();
8467
8468 repository.read_with(cx, |repository, _cx| {
8469 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8470 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
8471 });
8472}
8473
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `#[cfg(any())]` compiles this test out entirely; remove it once the flakiness is fixed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Verifies that a conflicted cherry-pick populates `merge_conflicts`, and
    // that completing the cherry-pick clears it again.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a commit on another branch that conflicts with a subsequent
    // commit on main, then cherry-pick it onto main.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git itself sees the conflict before asserting on our
    // own state.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8556
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that rewriting `.gitignore` re-evaluates both worktree entry
    // ignore flags and git statuses for the affected files.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree: .gitignore and a.xml are committed as-is, so
    // a.xml starts out unmodified and b.txt is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored, and b.txt shows up as newly staged (Added).
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8624
// NOTE:
// This test always fails on Windows because, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Verifies that when a repository's work directory is renamed on disk,
    // the repository entity follows the rename and its statuses are kept.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    // `a` becomes modified; `b` stays untracked.
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should now point at the renamed directory, with the same
    // per-file statuses as before.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8706
// NOTE: This test always fails on Windows because, unlike on Unix, you can't
// rename a directory which some program has already open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // Broad end-to-end test of git status tracking against a real repository:
    // untracked files, modifications, commits, reset/stash, ignore rules, and
    // directory renames.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they show as untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files no longer carry any status.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files, extend the ignore rules, and commit the new ignore file.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new untracked file inside a nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the parent directory; the untracked status should follow the
    // file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8935
8936#[gpui::test]
8937#[ignore]
8938async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8939 init_test(cx);
8940 cx.executor().allow_parking();
8941
8942 const IGNORE_RULE: &str = "**/target";
8943
8944 let root = TempTree::new(json!({
8945 "project": {
8946 "src": {
8947 "main.rs": "fn main() {}"
8948 },
8949 "target": {
8950 "debug": {
8951 "important_text.txt": "important text",
8952 },
8953 },
8954 ".gitignore": IGNORE_RULE
8955 },
8956
8957 }));
8958 let root_path = root.path();
8959
8960 // Set up git repository before creating the worktree.
8961 let work_dir = root.path().join("project");
8962 let repo = git_init(work_dir.as_path());
8963 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8964 git_add("src/main.rs", &repo);
8965 git_add(".gitignore", &repo);
8966 git_commit("Initial commit", &repo);
8967
8968 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8969 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8970 let project_events = Arc::new(Mutex::new(Vec::new()));
8971 project.update(cx, |project, cx| {
8972 let repo_events = repository_updates.clone();
8973 cx.subscribe(project.git_store(), move |_, _, e, _| {
8974 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8975 repo_events.lock().push(e.clone());
8976 }
8977 })
8978 .detach();
8979 let project_events = project_events.clone();
8980 cx.subscribe_self(move |_, e, _| {
8981 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8982 project_events.lock().extend(
8983 updates
8984 .iter()
8985 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
8986 .filter(|(path, _)| path != "fs-event-sentinel"),
8987 );
8988 }
8989 })
8990 .detach();
8991 });
8992
8993 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8994 tree.flush_fs_events(cx).await;
8995 tree.update(cx, |tree, cx| {
8996 tree.load_file(
8997 rel_path("project/target/debug/important_text.txt"),
8998 &Default::default(),
8999 cx,
9000 )
9001 })
9002 .await
9003 .unwrap();
9004 tree.update(cx, |tree, _| {
9005 assert_eq!(
9006 tree.entries(true, 0)
9007 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9008 .collect::<Vec<_>>(),
9009 vec![
9010 (rel_path(""), false),
9011 (rel_path("project/"), false),
9012 (rel_path("project/.gitignore"), false),
9013 (rel_path("project/src"), false),
9014 (rel_path("project/src/main.rs"), false),
9015 (rel_path("project/target"), true),
9016 (rel_path("project/target/debug"), true),
9017 (rel_path("project/target/debug/important_text.txt"), true),
9018 ]
9019 );
9020 });
9021
9022 assert_eq!(
9023 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9024 vec![
9025 RepositoryEvent::StatusesChanged { full_scan: true },
9026 RepositoryEvent::MergeHeadsChanged,
9027 ],
9028 "Initial worktree scan should produce a repo update event"
9029 );
9030 assert_eq!(
9031 project_events.lock().drain(..).collect::<Vec<_>>(),
9032 vec![
9033 ("project/target".to_string(), PathChange::Loaded),
9034 ("project/target/debug".to_string(), PathChange::Loaded),
9035 (
9036 "project/target/debug/important_text.txt".to_string(),
9037 PathChange::Loaded
9038 ),
9039 ],
9040 "Initial project changes should show that all not-ignored and all opened files are loaded"
9041 );
9042
9043 let deps_dir = work_dir.join("target").join("debug").join("deps");
9044 std::fs::create_dir_all(&deps_dir).unwrap();
9045 tree.flush_fs_events(cx).await;
9046 project
9047 .update(cx, |project, cx| project.git_scans_complete(cx))
9048 .await;
9049 cx.executor().run_until_parked();
9050 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9051 tree.flush_fs_events(cx).await;
9052 project
9053 .update(cx, |project, cx| project.git_scans_complete(cx))
9054 .await;
9055 cx.executor().run_until_parked();
9056 std::fs::remove_dir_all(&deps_dir).unwrap();
9057 tree.flush_fs_events(cx).await;
9058 project
9059 .update(cx, |project, cx| project.git_scans_complete(cx))
9060 .await;
9061 cx.executor().run_until_parked();
9062
9063 tree.update(cx, |tree, _| {
9064 assert_eq!(
9065 tree.entries(true, 0)
9066 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9067 .collect::<Vec<_>>(),
9068 vec![
9069 (rel_path(""), false),
9070 (rel_path("project/"), false),
9071 (rel_path("project/.gitignore"), false),
9072 (rel_path("project/src"), false),
9073 (rel_path("project/src/main.rs"), false),
9074 (rel_path("project/target"), true),
9075 (rel_path("project/target/debug"), true),
9076 (rel_path("project/target/debug/important_text.txt"), true),
9077 ],
9078 "No stray temp files should be left after the flycheck changes"
9079 );
9080 });
9081
9082 assert_eq!(
9083 repository_updates
9084 .lock()
9085 .iter()
9086 .cloned()
9087 .collect::<Vec<_>>(),
9088 Vec::new(),
9089 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9090 );
9091 assert_eq!(
9092 project_events.lock().as_slice(),
9093 vec![
9094 ("project/target/debug/deps".to_string(), PathChange::Added),
9095 ("project/target/debug/deps".to_string(), PathChange::Removed),
9096 ],
9097 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9098 No updates for more nested directories should happen as those are ignored",
9099 );
9100}
9101
9102#[gpui::test]
9103async fn test_odd_events_for_ignored_dirs(
9104 executor: BackgroundExecutor,
9105 cx: &mut gpui::TestAppContext,
9106) {
9107 init_test(cx);
9108 let fs = FakeFs::new(executor);
9109 fs.insert_tree(
9110 path!("/root"),
9111 json!({
9112 ".git": {},
9113 ".gitignore": "**/target/",
9114 "src": {
9115 "main.rs": "fn main() {}",
9116 },
9117 "target": {
9118 "debug": {
9119 "foo.txt": "foo",
9120 "deps": {}
9121 }
9122 }
9123 }),
9124 )
9125 .await;
9126 fs.set_head_and_index_for_repo(
9127 path!("/root/.git").as_ref(),
9128 &[
9129 (".gitignore", "**/target/".into()),
9130 ("src/main.rs", "fn main() {}".into()),
9131 ],
9132 );
9133
9134 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9135 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9136 let project_events = Arc::new(Mutex::new(Vec::new()));
9137 project.update(cx, |project, cx| {
9138 let repository_updates = repository_updates.clone();
9139 cx.subscribe(project.git_store(), move |_, _, e, _| {
9140 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9141 repository_updates.lock().push(e.clone());
9142 }
9143 })
9144 .detach();
9145 let project_events = project_events.clone();
9146 cx.subscribe_self(move |_, e, _| {
9147 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9148 project_events.lock().extend(
9149 updates
9150 .iter()
9151 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9152 .filter(|(path, _)| path != "fs-event-sentinel"),
9153 );
9154 }
9155 })
9156 .detach();
9157 });
9158
9159 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9160 tree.update(cx, |tree, cx| {
9161 tree.load_file(rel_path("target/debug/foo.txt"), &Default::default(), cx)
9162 })
9163 .await
9164 .unwrap();
9165 tree.flush_fs_events(cx).await;
9166 project
9167 .update(cx, |project, cx| project.git_scans_complete(cx))
9168 .await;
9169 cx.run_until_parked();
9170 tree.update(cx, |tree, _| {
9171 assert_eq!(
9172 tree.entries(true, 0)
9173 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9174 .collect::<Vec<_>>(),
9175 vec![
9176 (rel_path(""), false),
9177 (rel_path(".gitignore"), false),
9178 (rel_path("src"), false),
9179 (rel_path("src/main.rs"), false),
9180 (rel_path("target"), true),
9181 (rel_path("target/debug"), true),
9182 (rel_path("target/debug/deps"), true),
9183 (rel_path("target/debug/foo.txt"), true),
9184 ]
9185 );
9186 });
9187
9188 assert_eq!(
9189 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9190 vec![
9191 RepositoryEvent::MergeHeadsChanged,
9192 RepositoryEvent::BranchChanged,
9193 RepositoryEvent::StatusesChanged { full_scan: false },
9194 RepositoryEvent::StatusesChanged { full_scan: false },
9195 ],
9196 "Initial worktree scan should produce a repo update event"
9197 );
9198 assert_eq!(
9199 project_events.lock().drain(..).collect::<Vec<_>>(),
9200 vec![
9201 ("target".to_string(), PathChange::Loaded),
9202 ("target/debug".to_string(), PathChange::Loaded),
9203 ("target/debug/deps".to_string(), PathChange::Loaded),
9204 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9205 ],
9206 "All non-ignored entries and all opened firs should be getting a project event",
9207 );
9208
9209 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9210 // This may happen multiple times during a single flycheck, but once is enough for testing.
9211 fs.emit_fs_event("/root/target/debug/deps", None);
9212 tree.flush_fs_events(cx).await;
9213 project
9214 .update(cx, |project, cx| project.git_scans_complete(cx))
9215 .await;
9216 cx.executor().run_until_parked();
9217
9218 assert_eq!(
9219 repository_updates
9220 .lock()
9221 .iter()
9222 .cloned()
9223 .collect::<Vec<_>>(),
9224 Vec::new(),
9225 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9226 );
9227 assert_eq!(
9228 project_events.lock().as_slice(),
9229 Vec::new(),
9230 "No further project events should happen, as only ignored dirs received FS events",
9231 );
9232}
9233
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that opening a single-file (invisible) worktree does not pull
    // the repositories of its ancestor directories into the project: only the
    // repo of the visible worktree (`dep1`) should ever be reported.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only dep1's own repository is visible; the enclosing dir1 repo is not.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add `b.txt` as a non-visible worktree; it lives inside dir1's repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged — the invisible worktree should
    // not have caused dir1's repository to be added.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9295
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that git status and `is_ignored` flags stay correct across
    // rescans: tracked files, files ignored by an ancestor .gitignore, and
    // files inside an ignored directory.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // Clear the default exclusions so ignored entries are still
                // scanned (as ignored) rather than excluded from the worktree.
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not expanded eagerly; refresh manually so that
    // `ignored-dir`'s children exist as worktree entries for the assertions.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        // Unmodified tracked file: no status, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Ignored by the repo's *ancestor* .gitignore (outside the repo), so
        // the worktree does not mark it ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        // Inside an ignored dir: no git status, but flagged ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files of each kind and stage tracked-file2, then check the
    // rescan picks up the right state for each.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Newly staged file shows as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git dir itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9436
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (`.git` file pointing into
    // `.git/worktrees/...`) and submodules (`.git` file pointing into
    // `.git/modules/...`) are each detected as distinct repositories, and
    // that git state changes inside them refresh the right repository.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // A "gitdir:" pointer file, as written by `git worktree add`.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    // A "gitdir:" pointer file, as written for submodules.
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        // The buffer must map to the linked worktree's repo, not the main one.
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait until the repository has processed all pending updates.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        // HEAD/index say "b" but the file contains "B": modified in worktree.
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9592
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two sibling worktrees that live inside the same git repository must be
    // reported as a single repository, not one per worktree.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo root as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9639
9640async fn search(
9641 project: &Entity<Project>,
9642 query: SearchQuery,
9643 cx: &mut gpui::TestAppContext,
9644) -> Result<HashMap<String, Vec<Range<usize>>>> {
9645 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9646 let mut results = HashMap::default();
9647 while let Ok(search_result) = search_rx.recv().await {
9648 match search_result {
9649 SearchResult::Buffer { buffer, ranges } => {
9650 results.entry(buffer).or_insert(ranges);
9651 }
9652 SearchResult::LimitReached => {}
9653 }
9654 }
9655 Ok(results
9656 .into_iter()
9657 .map(|(buffer, ranges)| {
9658 buffer.update(cx, |buffer, cx| {
9659 let path = buffer
9660 .file()
9661 .unwrap()
9662 .full_path(cx)
9663 .to_string_lossy()
9664 .to_string();
9665 let ranges = ranges
9666 .into_iter()
9667 .map(|range| range.to_offset(buffer))
9668 .collect::<Vec<_>>();
9669 (path, ranges)
9670 })
9671 })
9672 .collect())
9673}
9674
/// Shared setup for the tests in this file: installs a test `SettingsStore`
/// and initializes the globals (`release_channel`, `language`, project
/// settings) that `Project::test` depends on.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9686
9687fn json_lang() -> Arc<Language> {
9688 Arc::new(Language::new(
9689 LanguageConfig {
9690 name: "JSON".into(),
9691 matcher: LanguageMatcher {
9692 path_suffixes: vec!["json".to_string()],
9693 ..Default::default()
9694 },
9695 ..Default::default()
9696 },
9697 None,
9698 ))
9699}
9700
9701fn js_lang() -> Arc<Language> {
9702 Arc::new(Language::new(
9703 LanguageConfig {
9704 name: "JavaScript".into(),
9705 matcher: LanguageMatcher {
9706 path_suffixes: vec!["js".to_string()],
9707 ..Default::default()
9708 },
9709 ..Default::default()
9710 },
9711 None,
9712 ))
9713}
9714
9715fn rust_lang() -> Arc<Language> {
9716 Arc::new(Language::new(
9717 LanguageConfig {
9718 name: "Rust".into(),
9719 matcher: LanguageMatcher {
9720 path_suffixes: vec!["rs".to_string()],
9721 ..Default::default()
9722 },
9723 ..Default::default()
9724 },
9725 Some(tree_sitter_rust::LANGUAGE.into()),
9726 ))
9727}
9728
/// A fake Python language whose toolchain lister reports a `.venv` directory
/// found in any ancestor of the queried path (relative to the worktree root).
/// No grammar is attached — parsing is not under test.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        // Returns one toolchain per ancestor directory (from the queried
        // subroot upward) that contains a `.venv` directory on the fake fs.
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unsupported in this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for these tests.
        fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9798
9799fn typescript_lang() -> Arc<Language> {
9800 Arc::new(Language::new(
9801 LanguageConfig {
9802 name: "TypeScript".into(),
9803 matcher: LanguageMatcher {
9804 path_suffixes: vec!["ts".to_string()],
9805 ..Default::default()
9806 },
9807 ..Default::default()
9808 },
9809 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9810 ))
9811}
9812
9813fn tsx_lang() -> Arc<Language> {
9814 Arc::new(Language::new(
9815 LanguageConfig {
9816 name: "tsx".into(),
9817 matcher: LanguageMatcher {
9818 path_suffixes: vec!["tsx".to_string()],
9819 ..Default::default()
9820 },
9821 ..Default::default()
9822 },
9823 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9824 ))
9825}
9826
9827fn get_all_tasks(
9828 project: &Entity<Project>,
9829 task_contexts: Arc<TaskContexts>,
9830 cx: &mut App,
9831) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9832 let new_tasks = project.update(cx, |project, cx| {
9833 project.task_store.update(cx, |task_store, cx| {
9834 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9835 this.used_and_current_resolved_tasks(task_contexts, cx)
9836 })
9837 })
9838 });
9839
9840 cx.background_spawn(async move {
9841 let (mut old, new) = new_tasks.await;
9842 old.extend(new);
9843 old
9844 })
9845}
9846
9847#[track_caller]
9848fn assert_entry_git_state(
9849 tree: &Worktree,
9850 repository: &Repository,
9851 path: &str,
9852 index_status: Option<StatusCode>,
9853 is_ignored: bool,
9854) {
9855 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9856 let entry = tree
9857 .entry_for_path(&rel_path(path))
9858 .unwrap_or_else(|| panic!("entry {path} not found"));
9859 let status = repository
9860 .status_for_path(&repo_path(path))
9861 .map(|entry| entry.status);
9862 let expected = index_status.map(|index_status| {
9863 TrackedStatus {
9864 index_status,
9865 worktree_status: StatusCode::Unmodified,
9866 }
9867 .into()
9868 });
9869 assert_eq!(
9870 status, expected,
9871 "expected {path} to have git status: {expected:?}"
9872 );
9873 assert_eq!(
9874 entry.is_ignored, is_ignored,
9875 "expected {path} to have is_ignored: {is_ignored}"
9876 );
9877}
9878
9879#[track_caller]
9880fn git_init(path: &Path) -> git2::Repository {
9881 let mut init_opts = RepositoryInitOptions::new();
9882 init_opts.initial_head("main");
9883 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9884}
9885
9886#[track_caller]
9887fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9888 let path = path.as_ref();
9889 let mut index = repo.index().expect("Failed to get index");
9890 index.add_path(path).expect("Failed to add file");
9891 index.write().expect("Failed to write index");
9892}
9893
9894#[track_caller]
9895fn git_remove_index(path: &Path, repo: &git2::Repository) {
9896 let mut index = repo.index().expect("Failed to get index");
9897 index.remove_path(path).expect("Failed to add file");
9898 index.write().expect("Failed to write index");
9899}
9900
9901#[track_caller]
9902fn git_commit(msg: &'static str, repo: &git2::Repository) {
9903 use git2::Signature;
9904
9905 let signature = Signature::now("test", "test@zed.dev").unwrap();
9906 let oid = repo.index().unwrap().write_tree().unwrap();
9907 let tree = repo.find_tree(oid).unwrap();
9908 if let Ok(head) = repo.head() {
9909 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9910
9911 let parent_commit = parent_obj.as_commit().unwrap();
9912
9913 repo.commit(
9914 Some("HEAD"),
9915 &signature,
9916 &signature,
9917 msg,
9918 &tree,
9919 &[parent_commit],
9920 )
9921 .expect("Failed to commit with parent");
9922 } else {
9923 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9924 .expect("Failed to commit");
9925 }
9926}
9927
// `cfg(any())` with no predicates is always false, so this helper is compiled
// out; it is kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9933
9934#[track_caller]
9935fn git_stash(repo: &mut git2::Repository) {
9936 use git2::Signature;
9937
9938 let signature = Signature::now("test", "test@zed.dev").unwrap();
9939 repo.stash_save(&signature, "N/A", None)
9940 .expect("Failed to stash");
9941}
9942
9943#[track_caller]
9944fn git_reset(offset: usize, repo: &git2::Repository) {
9945 let head = repo.head().expect("Couldn't get repo head");
9946 let object = head.peel(git2::ObjectType::Commit).unwrap();
9947 let commit = object.as_commit().unwrap();
9948 let new_head = commit
9949 .parents()
9950 .inspect(|parnet| {
9951 parnet.message();
9952 })
9953 .nth(offset)
9954 .expect("Not enough history");
9955 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9956 .expect("Could not reset");
9957}
9958
// `cfg(any())` with no predicates is always false, so this helper is compiled
// out; it is kept for tests that may re-enable it.
/// Creates branch `name` pointing at the current HEAD commit.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-paste failure message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9969
// `cfg(any())` with no predicates is always false, so this helper is compiled
// out; it is kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9976
// `cfg(any())` with no predicates is always false, so this helper is compiled
// out; it is kept for tests that may re-enable it.
/// Returns the repository's full status as a map of path -> git2 status flags.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_owned(), entry.status()))
        .collect()
}
9986
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two worktrees, so resolution must pick the right one per path.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves with its full relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to the second worktree.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A nonexistent path still resolves as long as it is inside a
        // worktree (useful for creating new files).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}