1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A worktree with a root `.editorconfig`, Zed project settings in `.zed/`,
    // and a nested `.editorconfig` in `b/` that partially overrides the root.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
            root = true
            [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
            max_line_length = 120
            [*.js]
            tab_width = 10
            max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
                [*.rs]
                indent_size = 2
                max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative
        // path, combining `.zed/settings.json` with applicable .editorconfig
        // sections for the file's detected language.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom GitLab-style hosting provider named
    // "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // Loading the project registered the provider from settings globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk; the watcher should pick this up.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // With the setting removed, the provider must be unregistered again.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
312
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // The worktree root has a `.zed` directory with settings and tasks, and
    // `b/` contains its own nested `.zed` directory whose settings/tasks
    // apply to files beneath it.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the single active worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind of the task defined in the root-level `.zed/tasks.json`.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: `a/a.rs` sees the root tab_size, while
            // `b/b.rs` sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both `.zed/tasks.json` files contribute one task each.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as scheduled and inject a global tasks.json
    // entry (with an env var) through the task inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // After scheduling, the just-run task is listed first; the injected
    // global task appears last with its env applied.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // A single worktree task whose command references $ZED_WORKTREE_ROOT,
    // which can only resolve when a worktree context supplies that variable.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With only an active-item context and no worktree context, the
    // ZED_WORKTREE_ROOT variable is unavailable, so no task resolves.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the worktree context supplies WorktreeRoot, the task resolves and
    // the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Manifest provider that roots a (sub)project at the nearest ancestor
    // directory containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path`, returning the first
        // directory that contains a `pyproject.toml`.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // One worktree containing two Python subprojects, each with its own
    // `pyproject.toml` and `.venv` directory; the project settings restrict
    // Python to the "ty" language server.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts a single "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance
    // (same server id) since no per-project toolchain differs yet.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b (its pyproject.toml).
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After activating a distinct toolchain for project-b, its buffer should
    // be served by a separate server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let settings_json_contents = json!({
1216 "languages": {
1217 "Rust": {
1218 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1219 }
1220 },
1221 "lsp": {
1222 "my_fake_lsp": {
1223 "binary": {
1224 // file exists, so this is treated as a relative path
1225 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1226 }
1227 },
1228 "lsp_on_path": {
1229 "binary": {
1230 // file doesn't exist, so it will fall back on PATH env var
1231 "path": path!("lsp_on_path.exe").to_string(),
1232 }
1233 }
1234 },
1235 });
1236
1237 let fs = FakeFs::new(cx.executor());
1238 fs.insert_tree(
1239 path!("/the-root"),
1240 json!({
1241 ".zed": {
1242 "settings.json": settings_json_contents.to_string(),
1243 },
1244 ".relative_path": {
1245 "to": {
1246 "my_fake_lsp.exe": "",
1247 },
1248 },
1249 "src": {
1250 "main.rs": "",
1251 }
1252 }),
1253 )
1254 .await;
1255
1256 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1257 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1258 language_registry.add(rust_lang());
1259
1260 let mut my_fake_lsp = language_registry.register_fake_lsp(
1261 "Rust",
1262 FakeLspAdapter {
1263 name: "my_fake_lsp",
1264 ..Default::default()
1265 },
1266 );
1267 let mut lsp_on_path = language_registry.register_fake_lsp(
1268 "Rust",
1269 FakeLspAdapter {
1270 name: "lsp_on_path",
1271 ..Default::default()
1272 },
1273 );
1274
1275 cx.run_until_parked();
1276
1277 // Start the language server by opening a buffer with a compatible file extension.
1278 project
1279 .update(cx, |project, cx| {
1280 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1281 })
1282 .await
1283 .unwrap();
1284
1285 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1286 assert_eq!(
1287 lsp_path.to_string_lossy(),
1288 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1289 );
1290
1291 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1292 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1293}
1294
1295#[gpui::test]
1296async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1297 init_test(cx);
1298
1299 let settings_json_contents = json!({
1300 "languages": {
1301 "Rust": {
1302 "language_servers": ["tilde_lsp"]
1303 }
1304 },
1305 "lsp": {
1306 "tilde_lsp": {
1307 "binary": {
1308 "path": "~/.local/bin/rust-analyzer",
1309 }
1310 }
1311 },
1312 });
1313
1314 let fs = FakeFs::new(cx.executor());
1315 fs.insert_tree(
1316 path!("/root"),
1317 json!({
1318 ".zed": {
1319 "settings.json": settings_json_contents.to_string(),
1320 },
1321 "src": {
1322 "main.rs": "fn main() {}",
1323 }
1324 }),
1325 )
1326 .await;
1327
1328 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1329 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1330 language_registry.add(rust_lang());
1331
1332 let mut tilde_lsp = language_registry.register_fake_lsp(
1333 "Rust",
1334 FakeLspAdapter {
1335 name: "tilde_lsp",
1336 ..Default::default()
1337 },
1338 );
1339 cx.run_until_parked();
1340
1341 project
1342 .update(cx, |project, cx| {
1343 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1344 })
1345 .await
1346 .unwrap();
1347
1348 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1349 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1350 assert_eq!(
1351 lsp_path, expected_path,
1352 "Tilde path should expand to home directory"
1353 );
1354}
1355
1356#[gpui::test]
1357async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1358 init_test(cx);
1359
1360 let fs = FakeFs::new(cx.executor());
1361 fs.insert_tree(
1362 path!("/the-root"),
1363 json!({
1364 ".gitignore": "target\n",
1365 "Cargo.lock": "",
1366 "src": {
1367 "a.rs": "",
1368 "b.rs": "",
1369 },
1370 "target": {
1371 "x": {
1372 "out": {
1373 "x.rs": ""
1374 }
1375 },
1376 "y": {
1377 "out": {
1378 "y.rs": "",
1379 }
1380 },
1381 "z": {
1382 "out": {
1383 "z.rs": ""
1384 }
1385 }
1386 }
1387 }),
1388 )
1389 .await;
1390 fs.insert_tree(
1391 path!("/the-registry"),
1392 json!({
1393 "dep1": {
1394 "src": {
1395 "dep1.rs": "",
1396 }
1397 },
1398 "dep2": {
1399 "src": {
1400 "dep2.rs": "",
1401 }
1402 },
1403 }),
1404 )
1405 .await;
1406 fs.insert_tree(
1407 path!("/the/stdlib"),
1408 json!({
1409 "LICENSE": "",
1410 "src": {
1411 "string.rs": "",
1412 }
1413 }),
1414 )
1415 .await;
1416
1417 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1418 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1419 (project.languages().clone(), project.lsp_store())
1420 });
1421 language_registry.add(rust_lang());
1422 let mut fake_servers = language_registry.register_fake_lsp(
1423 "Rust",
1424 FakeLspAdapter {
1425 name: "the-language-server",
1426 ..Default::default()
1427 },
1428 );
1429
1430 cx.executor().run_until_parked();
1431
1432 // Start the language server by opening a buffer with a compatible file extension.
1433 project
1434 .update(cx, |project, cx| {
1435 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1436 })
1437 .await
1438 .unwrap();
1439
1440 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1441 project.update(cx, |project, cx| {
1442 let worktree = project.worktrees(cx).next().unwrap();
1443 assert_eq!(
1444 worktree
1445 .read(cx)
1446 .snapshot()
1447 .entries(true, 0)
1448 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1449 .collect::<Vec<_>>(),
1450 &[
1451 ("", false),
1452 (".gitignore", false),
1453 ("Cargo.lock", false),
1454 ("src", false),
1455 ("src/a.rs", false),
1456 ("src/b.rs", false),
1457 ("target", true),
1458 ]
1459 );
1460 });
1461
1462 let prev_read_dir_count = fs.read_dir_call_count();
1463
1464 let fake_server = fake_servers.next().await.unwrap();
1465 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1466 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1467 id
1468 });
1469
1470 // Simulate jumping to a definition in a dependency outside of the worktree.
1471 let _out_of_worktree_buffer = project
1472 .update(cx, |project, cx| {
1473 project.open_local_buffer_via_lsp(
1474 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1475 server_id,
1476 cx,
1477 )
1478 })
1479 .await
1480 .unwrap();
1481
1482 // Keep track of the FS events reported to the language server.
1483 let file_changes = Arc::new(Mutex::new(Vec::new()));
1484 fake_server
1485 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1486 registrations: vec![lsp::Registration {
1487 id: Default::default(),
1488 method: "workspace/didChangeWatchedFiles".to_string(),
1489 register_options: serde_json::to_value(
1490 lsp::DidChangeWatchedFilesRegistrationOptions {
1491 watchers: vec![
1492 lsp::FileSystemWatcher {
1493 glob_pattern: lsp::GlobPattern::String(
1494 path!("/the-root/Cargo.toml").to_string(),
1495 ),
1496 kind: None,
1497 },
1498 lsp::FileSystemWatcher {
1499 glob_pattern: lsp::GlobPattern::String(
1500 path!("/the-root/src/*.{rs,c}").to_string(),
1501 ),
1502 kind: None,
1503 },
1504 lsp::FileSystemWatcher {
1505 glob_pattern: lsp::GlobPattern::String(
1506 path!("/the-root/target/y/**/*.rs").to_string(),
1507 ),
1508 kind: None,
1509 },
1510 lsp::FileSystemWatcher {
1511 glob_pattern: lsp::GlobPattern::String(
1512 path!("/the/stdlib/src/**/*.rs").to_string(),
1513 ),
1514 kind: None,
1515 },
1516 lsp::FileSystemWatcher {
1517 glob_pattern: lsp::GlobPattern::String(
1518 path!("**/Cargo.lock").to_string(),
1519 ),
1520 kind: None,
1521 },
1522 ],
1523 },
1524 )
1525 .ok(),
1526 }],
1527 })
1528 .await
1529 .into_response()
1530 .unwrap();
1531 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1532 let file_changes = file_changes.clone();
1533 move |params, _| {
1534 let mut file_changes = file_changes.lock();
1535 file_changes.extend(params.changes);
1536 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1537 }
1538 });
1539
1540 cx.executor().run_until_parked();
1541 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1542 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1543
1544 let mut new_watched_paths = fs.watched_paths();
1545 new_watched_paths.retain(|path| {
1546 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1547 });
1548 assert_eq!(
1549 &new_watched_paths,
1550 &[
1551 Path::new(path!("/the-root")),
1552 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1553 Path::new(path!("/the/stdlib/src"))
1554 ]
1555 );
1556
1557 // Now the language server has asked us to watch an ignored directory path,
1558 // so we recursively load it.
1559 project.update(cx, |project, cx| {
1560 let worktree = project.visible_worktrees(cx).next().unwrap();
1561 assert_eq!(
1562 worktree
1563 .read(cx)
1564 .snapshot()
1565 .entries(true, 0)
1566 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1567 .collect::<Vec<_>>(),
1568 &[
1569 ("", false),
1570 (".gitignore", false),
1571 ("Cargo.lock", false),
1572 ("src", false),
1573 ("src/a.rs", false),
1574 ("src/b.rs", false),
1575 ("target", true),
1576 ("target/x", true),
1577 ("target/y", true),
1578 ("target/y/out", true),
1579 ("target/y/out/y.rs", true),
1580 ("target/z", true),
1581 ]
1582 );
1583 });
1584
1585 // Perform some file system mutations, two of which match the watched patterns,
1586 // and one of which does not.
1587 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1588 .await
1589 .unwrap();
1590 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1591 .await
1592 .unwrap();
1593 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1594 .await
1595 .unwrap();
1596 fs.create_file(
1597 path!("/the-root/target/x/out/x2.rs").as_ref(),
1598 Default::default(),
1599 )
1600 .await
1601 .unwrap();
1602 fs.create_file(
1603 path!("/the-root/target/y/out/y2.rs").as_ref(),
1604 Default::default(),
1605 )
1606 .await
1607 .unwrap();
1608 fs.save(
1609 path!("/the-root/Cargo.lock").as_ref(),
1610 &"".into(),
1611 Default::default(),
1612 )
1613 .await
1614 .unwrap();
1615 fs.save(
1616 path!("/the-stdlib/LICENSE").as_ref(),
1617 &"".into(),
1618 Default::default(),
1619 )
1620 .await
1621 .unwrap();
1622 fs.save(
1623 path!("/the/stdlib/src/string.rs").as_ref(),
1624 &"".into(),
1625 Default::default(),
1626 )
1627 .await
1628 .unwrap();
1629
1630 // The language server receives events for the FS mutations that match its watch patterns.
1631 cx.executor().run_until_parked();
1632 assert_eq!(
1633 &*file_changes.lock(),
1634 &[
1635 lsp::FileEvent {
1636 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1637 typ: lsp::FileChangeType::CHANGED,
1638 },
1639 lsp::FileEvent {
1640 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1641 typ: lsp::FileChangeType::DELETED,
1642 },
1643 lsp::FileEvent {
1644 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1645 typ: lsp::FileChangeType::CREATED,
1646 },
1647 lsp::FileEvent {
1648 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1649 typ: lsp::FileChangeType::CREATED,
1650 },
1651 lsp::FileEvent {
1652 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1653 typ: lsp::FileChangeType::CHANGED,
1654 },
1655 ]
1656 );
1657}
1658
1659#[gpui::test]
1660async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1661 init_test(cx);
1662
1663 let fs = FakeFs::new(cx.executor());
1664 fs.insert_tree(
1665 path!("/dir"),
1666 json!({
1667 "a.rs": "let a = 1;",
1668 "b.rs": "let b = 2;"
1669 }),
1670 )
1671 .await;
1672
1673 let project = Project::test(
1674 fs,
1675 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1676 cx,
1677 )
1678 .await;
1679 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1680
1681 let buffer_a = project
1682 .update(cx, |project, cx| {
1683 project.open_local_buffer(path!("/dir/a.rs"), cx)
1684 })
1685 .await
1686 .unwrap();
1687 let buffer_b = project
1688 .update(cx, |project, cx| {
1689 project.open_local_buffer(path!("/dir/b.rs"), cx)
1690 })
1691 .await
1692 .unwrap();
1693
1694 lsp_store.update(cx, |lsp_store, cx| {
1695 lsp_store
1696 .update_diagnostics(
1697 LanguageServerId(0),
1698 lsp::PublishDiagnosticsParams {
1699 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1700 version: None,
1701 diagnostics: vec![lsp::Diagnostic {
1702 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1703 severity: Some(lsp::DiagnosticSeverity::ERROR),
1704 message: "error 1".to_string(),
1705 ..Default::default()
1706 }],
1707 },
1708 None,
1709 DiagnosticSourceKind::Pushed,
1710 &[],
1711 cx,
1712 )
1713 .unwrap();
1714 lsp_store
1715 .update_diagnostics(
1716 LanguageServerId(0),
1717 lsp::PublishDiagnosticsParams {
1718 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1719 version: None,
1720 diagnostics: vec![lsp::Diagnostic {
1721 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1722 severity: Some(DiagnosticSeverity::WARNING),
1723 message: "error 2".to_string(),
1724 ..Default::default()
1725 }],
1726 },
1727 None,
1728 DiagnosticSourceKind::Pushed,
1729 &[],
1730 cx,
1731 )
1732 .unwrap();
1733 });
1734
1735 buffer_a.update(cx, |buffer, _| {
1736 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1737 assert_eq!(
1738 chunks
1739 .iter()
1740 .map(|(s, d)| (s.as_str(), *d))
1741 .collect::<Vec<_>>(),
1742 &[
1743 ("let ", None),
1744 ("a", Some(DiagnosticSeverity::ERROR)),
1745 (" = 1;", None),
1746 ]
1747 );
1748 });
1749 buffer_b.update(cx, |buffer, _| {
1750 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1751 assert_eq!(
1752 chunks
1753 .iter()
1754 .map(|(s, d)| (s.as_str(), *d))
1755 .collect::<Vec<_>>(),
1756 &[
1757 ("let ", None),
1758 ("b", Some(DiagnosticSeverity::WARNING)),
1759 (" = 2;", None),
1760 ]
1761 );
1762 });
1763}
1764
1765#[gpui::test]
1766async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1767 init_test(cx);
1768
1769 let fs = FakeFs::new(cx.executor());
1770 fs.insert_tree(
1771 path!("/root"),
1772 json!({
1773 "dir": {
1774 ".git": {
1775 "HEAD": "ref: refs/heads/main",
1776 },
1777 ".gitignore": "b.rs",
1778 "a.rs": "let a = 1;",
1779 "b.rs": "let b = 2;",
1780 },
1781 "other.rs": "let b = c;"
1782 }),
1783 )
1784 .await;
1785
1786 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1787 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1788 let (worktree, _) = project
1789 .update(cx, |project, cx| {
1790 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1791 })
1792 .await
1793 .unwrap();
1794 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1795
1796 let (worktree, _) = project
1797 .update(cx, |project, cx| {
1798 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1799 })
1800 .await
1801 .unwrap();
1802 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1803
1804 let server_id = LanguageServerId(0);
1805 lsp_store.update(cx, |lsp_store, cx| {
1806 lsp_store
1807 .update_diagnostics(
1808 server_id,
1809 lsp::PublishDiagnosticsParams {
1810 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1811 version: None,
1812 diagnostics: vec![lsp::Diagnostic {
1813 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1814 severity: Some(lsp::DiagnosticSeverity::ERROR),
1815 message: "unused variable 'b'".to_string(),
1816 ..Default::default()
1817 }],
1818 },
1819 None,
1820 DiagnosticSourceKind::Pushed,
1821 &[],
1822 cx,
1823 )
1824 .unwrap();
1825 lsp_store
1826 .update_diagnostics(
1827 server_id,
1828 lsp::PublishDiagnosticsParams {
1829 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1830 version: None,
1831 diagnostics: vec![lsp::Diagnostic {
1832 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1833 severity: Some(lsp::DiagnosticSeverity::ERROR),
1834 message: "unknown variable 'c'".to_string(),
1835 ..Default::default()
1836 }],
1837 },
1838 None,
1839 DiagnosticSourceKind::Pushed,
1840 &[],
1841 cx,
1842 )
1843 .unwrap();
1844 });
1845
1846 let main_ignored_buffer = project
1847 .update(cx, |project, cx| {
1848 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1849 })
1850 .await
1851 .unwrap();
1852 main_ignored_buffer.update(cx, |buffer, _| {
1853 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1854 assert_eq!(
1855 chunks
1856 .iter()
1857 .map(|(s, d)| (s.as_str(), *d))
1858 .collect::<Vec<_>>(),
1859 &[
1860 ("let ", None),
1861 ("b", Some(DiagnosticSeverity::ERROR)),
1862 (" = 2;", None),
1863 ],
1864 "Gigitnored buffers should still get in-buffer diagnostics",
1865 );
1866 });
1867 let other_buffer = project
1868 .update(cx, |project, cx| {
1869 project.open_buffer((other_worktree_id, rel_path("")), cx)
1870 })
1871 .await
1872 .unwrap();
1873 other_buffer.update(cx, |buffer, _| {
1874 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1875 assert_eq!(
1876 chunks
1877 .iter()
1878 .map(|(s, d)| (s.as_str(), *d))
1879 .collect::<Vec<_>>(),
1880 &[
1881 ("let b = ", None),
1882 ("c", Some(DiagnosticSeverity::ERROR)),
1883 (";", None),
1884 ],
1885 "Buffers from hidden projects should still get in-buffer diagnostics"
1886 );
1887 });
1888
1889 project.update(cx, |project, cx| {
1890 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1891 assert_eq!(
1892 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1893 vec![(
1894 ProjectPath {
1895 worktree_id: main_worktree_id,
1896 path: rel_path("b.rs").into(),
1897 },
1898 server_id,
1899 DiagnosticSummary {
1900 error_count: 1,
1901 warning_count: 0,
1902 }
1903 )]
1904 );
1905 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1906 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1907 });
1908}
1909
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake server uses to mark disk-based diagnostics
    // (cargo-check style) work; the project translates its begin/end into
    // DiskBasedDiagnosticsStarted/Finished events.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events so each step below can assert the exact
    // event emitted in response.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token emits a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs (which is not even open yet) emits a
    // DiagnosticsUpdated event for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress emits the matching "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Opening the buffer after the fact still surfaces the stored diagnostic.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second empty publish is a no-op: no further event should be pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2045
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token that marks disk-based diagnostics progress on the fake server.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The events below assert the exact restart sequence: old server removed,
    // new server (id 1) added, buffer re-registered, then diagnostics restarted.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server should be reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2145
2146#[gpui::test]
2147async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2148 init_test(cx);
2149
2150 let fs = FakeFs::new(cx.executor());
2151 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2152
2153 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2154
2155 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2156 language_registry.add(rust_lang());
2157 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2158
2159 let (buffer, _) = project
2160 .update(cx, |project, cx| {
2161 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2162 })
2163 .await
2164 .unwrap();
2165
2166 // Publish diagnostics
2167 let fake_server = fake_servers.next().await.unwrap();
2168 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2169 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2170 version: None,
2171 diagnostics: vec![lsp::Diagnostic {
2172 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2173 severity: Some(lsp::DiagnosticSeverity::ERROR),
2174 message: "the message".to_string(),
2175 ..Default::default()
2176 }],
2177 });
2178
2179 cx.executor().run_until_parked();
2180 buffer.update(cx, |buffer, _| {
2181 assert_eq!(
2182 buffer
2183 .snapshot()
2184 .diagnostics_in_range::<_, usize>(0..1, false)
2185 .map(|entry| entry.diagnostic.message.clone())
2186 .collect::<Vec<_>>(),
2187 ["the message".to_string()]
2188 );
2189 });
2190 project.update(cx, |project, cx| {
2191 assert_eq!(
2192 project.diagnostic_summary(false, cx),
2193 DiagnosticSummary {
2194 error_count: 1,
2195 warning_count: 0,
2196 }
2197 );
2198 });
2199
2200 project.update(cx, |project, cx| {
2201 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2202 });
2203
2204 // The diagnostics are cleared.
2205 cx.executor().run_until_parked();
2206 buffer.update(cx, |buffer, _| {
2207 assert_eq!(
2208 buffer
2209 .snapshot()
2210 .diagnostics_in_range::<_, usize>(0..1, false)
2211 .map(|entry| entry.diagnostic.message.clone())
2212 .collect::<Vec<_>>(),
2213 Vec::<String>::new(),
2214 );
2215 });
2216 project.update(cx, |project, cx| {
2217 assert_eq!(
2218 project.diagnostic_summary(false, cx),
2219 DiagnosticSummary {
2220 error_count: 0,
2221 warning_count: 0,
2222 }
2223 );
2224 });
2225}
2226
// Verifies that diagnostics published with a buffer version the client has
// never seen (here, 10000) do not poison state: after restarting the language
// server, the buffer is re-opened with a fresh version starting at 0.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    // Opening the buffer starts the first fake server instance.
    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The restarted server re-opens the buffer; its version must reset to 0
    // rather than continuing from the bogus version reported above.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
2266
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel only for tokens that were started as cancellable:
// the non-cancellable "another-token" must not receive a cancel notification.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable progress token...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable token, which is the only one that should be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Exactly the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2331
// Verifies that flipping the per-language `enable_language_server` setting
// starts/stops only the matching server: disabling Rust exits the Rust server
// while the JS server keeps running, and re-enabling Rust spawns a fresh
// instance that re-opens the Rust buffer.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register one fake server per language so we can observe each independently.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Both servers open their respective buffers.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2449
// Verifies that LSP diagnostics published against an *older* buffer version are
// transformed through subsequent edits so they land on the current text:
// - diagnostics shift with insertions made after the reported version,
// - overlapping diagnostics highlight correctly,
// - out-of-order publishes against a newer version are mapped via the edits
//   since that version.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (They were reported at lines 0..=2 but the two inserted newlines shift
    // them to lines 2..=4 in the current buffer.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both diagnostics land on line 2 (after the earlier shift); the wider
        // warning is returned first, then the narrower error nested inside it.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2741
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the next character, and at end-of-line it is
// extended backward to cover the preceding character instead.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =\n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Inject two empty-range diagnostics directly into the LSP store: one in
    // the middle of line 0, one past the end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2819
// Verifies that diagnostics reported by different language servers for the
// same path are tracked independently: two servers each reporting one error
// yield a summary of two errors, not one overwriting the other.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error over the same range...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...as server 1; both must be counted.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2880
// Verifies that `edits_from_lsp` correctly interprets edits computed against a
// *past* buffer version: the buffer is edited after the server snapshots it,
// and the server's edits (expressed in old coordinates) must be rebased onto
// the current text, preserving the user's interleaved insertions.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server will compute its edits against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The server's edits use coordinates of `lsp_document_version`, before the
    // user edits above were made.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits keeps the user's comments and applies the
    // server's renames (f1 -> f10, f2 -> f200, f3 -> f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3035
// Verifies that `edits_from_lsp` minimizes a "replace almost everything" style
// diff (as rust-analyzer emits for merge-imports) down to the actual changed
// spans, so applying it produces a small, precise set of buffer edits.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The sprawling four-edit diff collapses to just two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3146
// Verifies that `edits_from_lsp` tolerates a spec-violating edit pair where an
// insertion at a position follows a replacement starting at that same
// position, applying both without losing either.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // Both edits apply: the import is prepended, and the no-op replacement
    // leaves the original call intact.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3202
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unsorted
// edits, inverted ranges (end before start), and ranges past the end of the
// file are normalized/clipped, still yielding a minimal, correct edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range extends past the last line (99); should be clipped.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed diff collapses to the same two minimal edits as the
        // well-formed variant of this test.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3309
3310fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3311 buffer: &Buffer,
3312 range: Range<T>,
3313) -> Vec<(String, Option<DiagnosticSeverity>)> {
3314 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3315 for chunk in buffer.snapshot().chunks(range, true) {
3316 if chunks
3317 .last()
3318 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3319 {
3320 chunks.last_mut().unwrap().0.push_str(chunk.text);
3321 } else {
3322 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3323 }
3324 }
3325 chunks
3326}
3327
// Verifies go-to-definition across files: the definition request resolves into
// a buffer in a new *invisible* worktree for the target file, and dropping the
// definition releases that worktree while the visible one remains.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Note: only b.rs is part of the project; a.rs is outside the worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers GotoDefinition with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // Resolving the definition added an invisible worktree for a.rs
        // alongside the visible b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: snapshot of (abs_path, is_visible) for every worktree in the project.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3426
// Verifies that when an LSP completion item carries its own `text_edit`, that
// edit's range and new text take precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server that advertises completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the completion request first; the handler installed below services it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The edit replaces the last three characters ("fqn") of the line.
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion must use the server-provided text edit verbatim,
    // not the label or insert_text.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3509
// Verifies completions that rely on `CompletionList.itemDefaults.editRange`:
// when an item has no `text_edit` of its own, the list-level default edit range
// is applied, and the replacement text comes from `text_edit_text` if present
// (Test 1) or falls back to the item's label (Test 2).
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server that advertises completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the completion request first; the handler installed below services it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the last three characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the new text; the default range is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range present, the label is used as the new text
        // (insert_text is ignored in this case).
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3646
// Verifies completion fallback behavior when neither the item nor the list
// defaults supply an edit range: the replace range is derived from the word
// around the cursor, and the new text comes from `insert_text` (Test 1) or
// the label (Test 2).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server that advertises completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replace range covers the word at the cursor: the trailing "fqn".
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replace range covers "cmp" — the word segment before the cursor,
    // not including the closing quote.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3752
// Verifies that carriage returns in a server-provided completion's insert text
// are normalized: both lone "\r" and "\r\n" become "\n" in the resulting edit.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server that advertises completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Insert text mixes a bare CR and a CRLF on purpose.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both line-ending forms must be normalized to plain newlines.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3820
// Exercises the full command-backed code-action flow: the server returns an
// action with no edits, resolving it yields only a command, executing that
// command makes the server send a `workspace/applyEdit` request back to the
// client, and the resulting edits are captured in the project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake server supporting resolvable code actions and a single
    // executable command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // The `data` payload marks this action as the one that
                    // resolves to a command below.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit must be undoable like a normal buffer edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3962
// Verifies that renaming an entry into a not-yet-existing directory hierarchy
// creates the intermediate directories, preserves file contents, and that a
// subsequent rename into an already-existing directory also succeeds.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Grab the worktree and the entry id of the file we'll rename.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file three directory levels deep; none of the directories exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // The rename produced a new entry; look up its id for the second move.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Move the file up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4070
4071#[gpui::test(iterations = 10)]
4072async fn test_save_file(cx: &mut gpui::TestAppContext) {
4073 init_test(cx);
4074
4075 let fs = FakeFs::new(cx.executor());
4076 fs.insert_tree(
4077 path!("/dir"),
4078 json!({
4079 "file1": "the old contents",
4080 }),
4081 )
4082 .await;
4083
4084 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4085 let buffer = project
4086 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4087 .await
4088 .unwrap();
4089 buffer.update(cx, |buffer, cx| {
4090 assert_eq!(buffer.text(), "the old contents");
4091 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4092 });
4093
4094 project
4095 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4096 .await
4097 .unwrap();
4098
4099 let new_text = fs
4100 .load(Path::new(path!("/dir/file1")))
4101 .await
4102 .unwrap()
4103 .replace("\r\n", "\n");
4104 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4105}
4106
// Regression test for #24349: saving an untitled buffer under a path with a
// recognized extension must spawn the matching language server and notify it
// about the newly saved file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // Register a fake Rust server; it should only start once a Rust file exists.
    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer: it has no path, so no server should attach yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is served by the newly spawned server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4186
4187#[gpui::test(iterations = 30)]
4188async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4189 init_test(cx);
4190
4191 let fs = FakeFs::new(cx.executor());
4192 fs.insert_tree(
4193 path!("/dir"),
4194 json!({
4195 "file1": "the original contents",
4196 }),
4197 )
4198 .await;
4199
4200 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4201 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4202 let buffer = project
4203 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4204 .await
4205 .unwrap();
4206
4207 // Simulate buffer diffs being slow, so that they don't complete before
4208 // the next file change occurs.
4209 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4210
4211 // Change the buffer's file on disk, and then wait for the file change
4212 // to be detected by the worktree, so that the buffer starts reloading.
4213 fs.save(
4214 path!("/dir/file1").as_ref(),
4215 &"the first contents".into(),
4216 Default::default(),
4217 )
4218 .await
4219 .unwrap();
4220 worktree.next_event(cx).await;
4221
4222 // Change the buffer's file again. Depending on the random seed, the
4223 // previous file change may still be in progress.
4224 fs.save(
4225 path!("/dir/file1").as_ref(),
4226 &"the second contents".into(),
4227 Default::default(),
4228 )
4229 .await
4230 .unwrap();
4231 worktree.next_event(cx).await;
4232
4233 cx.executor().run_until_parked();
4234 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4235 buffer.read_with(cx, |buffer, _| {
4236 assert_eq!(buffer.text(), on_disk_text);
4237 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4238 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4239 });
4240}
4241
4242#[gpui::test(iterations = 30)]
4243async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4244 init_test(cx);
4245
4246 let fs = FakeFs::new(cx.executor());
4247 fs.insert_tree(
4248 path!("/dir"),
4249 json!({
4250 "file1": "the original contents",
4251 }),
4252 )
4253 .await;
4254
4255 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4256 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4257 let buffer = project
4258 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4259 .await
4260 .unwrap();
4261
4262 // Simulate buffer diffs being slow, so that they don't complete before
4263 // the next file change occurs.
4264 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4265
4266 // Change the buffer's file on disk, and then wait for the file change
4267 // to be detected by the worktree, so that the buffer starts reloading.
4268 fs.save(
4269 path!("/dir/file1").as_ref(),
4270 &"the first contents".into(),
4271 Default::default(),
4272 )
4273 .await
4274 .unwrap();
4275 worktree.next_event(cx).await;
4276
4277 cx.executor()
4278 .spawn(cx.executor().simulate_random_delay())
4279 .await;
4280
4281 // Perform a noop edit, causing the buffer's version to increase.
4282 buffer.update(cx, |buffer, cx| {
4283 buffer.edit([(0..0, " ")], None, cx);
4284 buffer.undo(cx);
4285 });
4286
4287 cx.executor().run_until_parked();
4288 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4289 buffer.read_with(cx, |buffer, _| {
4290 let buffer_text = buffer.text();
4291 if buffer_text == on_disk_text {
4292 assert!(
4293 !buffer.is_dirty() && !buffer.has_conflict(),
4294 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4295 );
4296 }
4297 // If the file change occurred while the buffer was processing the first
4298 // change, the buffer will be in a conflicting state.
4299 else {
4300 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4301 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4302 }
4303 });
4304}
4305
4306#[gpui::test]
4307async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4308 init_test(cx);
4309
4310 let fs = FakeFs::new(cx.executor());
4311 fs.insert_tree(
4312 path!("/dir"),
4313 json!({
4314 "file1": "the old contents",
4315 }),
4316 )
4317 .await;
4318
4319 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4320 let buffer = project
4321 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4322 .await
4323 .unwrap();
4324 buffer.update(cx, |buffer, cx| {
4325 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4326 });
4327
4328 project
4329 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4330 .await
4331 .unwrap();
4332
4333 let new_text = fs
4334 .load(Path::new(path!("/dir/file1")))
4335 .await
4336 .unwrap()
4337 .replace("\r\n", "\n");
4338 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4339}
4340
// Verifies `save_buffer_as` on an untitled buffer: the buffer gains a file,
// its dirty flag clears, its language is re-detected from the new extension,
// and opening the same path afterwards returns the identical buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as Plain Text and becomes dirty when edited.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // Let pending work (e.g. language detection for the new path) finish.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4394
4395#[gpui::test]
4396async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4397 init_test(cx);
4398
4399 let fs = FakeFs::new(cx.executor());
4400 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4401
4402 fs.insert_tree(
4403 path!("/dir"),
4404 json!({
4405 "data_a.txt": "data about a"
4406 }),
4407 )
4408 .await;
4409
4410 let buffer = project
4411 .update(cx, |project, cx| {
4412 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4413 })
4414 .await
4415 .unwrap();
4416
4417 buffer.update(cx, |buffer, cx| {
4418 buffer.edit([(11..12, "b")], None, cx);
4419 });
4420
4421 // Save buffer's contents as a new file and confirm that the buffer's now
4422 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4423 // file associated with the buffer has now been updated to `data_b.txt`
4424 project
4425 .update(cx, |project, cx| {
4426 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4427 let new_path = ProjectPath {
4428 worktree_id,
4429 path: rel_path("data_b.txt").into(),
4430 };
4431
4432 project.save_buffer_as(buffer.clone(), new_path, cx)
4433 })
4434 .await
4435 .unwrap();
4436
4437 buffer.update(cx, |buffer, cx| {
4438 assert_eq!(
4439 buffer.file().unwrap().full_path(cx),
4440 Path::new("dir/data_b.txt")
4441 )
4442 });
4443
4444 // Open the original `data_a.txt` file, confirming that its contents are
4445 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4446 let original_buffer = project
4447 .update(cx, |project, cx| {
4448 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4449 })
4450 .await
4451 .unwrap();
4452
4453 original_buffer.update(cx, |buffer, cx| {
4454 assert_eq!(buffer.text(), "data about a");
4455 assert_eq!(
4456 buffer.file().unwrap().full_path(cx),
4457 Path::new("dir/data_a.txt")
4458 )
4459 });
4460}
4461
4462#[gpui::test(retries = 5)]
4463async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4464 use worktree::WorktreeModelHandle as _;
4465
4466 init_test(cx);
4467 cx.executor().allow_parking();
4468
4469 let dir = TempTree::new(json!({
4470 "a": {
4471 "file1": "",
4472 "file2": "",
4473 "file3": "",
4474 },
4475 "b": {
4476 "c": {
4477 "file4": "",
4478 "file5": "",
4479 }
4480 }
4481 }));
4482
4483 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
4484
4485 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4486 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
4487 async move { buffer.await.unwrap() }
4488 };
4489 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4490 project.update(cx, |project, cx| {
4491 let tree = project.worktrees(cx).next().unwrap();
4492 tree.read(cx)
4493 .entry_for_path(rel_path(path))
4494 .unwrap_or_else(|| panic!("no entry for path {}", path))
4495 .id
4496 })
4497 };
4498
4499 let buffer2 = buffer_for_path("a/file2", cx).await;
4500 let buffer3 = buffer_for_path("a/file3", cx).await;
4501 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4502 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4503
4504 let file2_id = id_for_path("a/file2", cx);
4505 let file3_id = id_for_path("a/file3", cx);
4506 let file4_id = id_for_path("b/c/file4", cx);
4507
4508 // Create a remote copy of this worktree.
4509 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4510 let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
4511
4512 let updates = Arc::new(Mutex::new(Vec::new()));
4513 tree.update(cx, |tree, cx| {
4514 let updates = updates.clone();
4515 tree.observe_updates(0, cx, move |update| {
4516 updates.lock().push(update);
4517 async { true }
4518 });
4519 });
4520
4521 let remote = cx.update(|cx| {
4522 Worktree::remote(
4523 0,
4524 ReplicaId::REMOTE_SERVER,
4525 metadata,
4526 project.read(cx).client().into(),
4527 project.read(cx).path_style(cx),
4528 cx,
4529 )
4530 });
4531
4532 cx.executor().run_until_parked();
4533
4534 cx.update(|cx| {
4535 assert!(!buffer2.read(cx).is_dirty());
4536 assert!(!buffer3.read(cx).is_dirty());
4537 assert!(!buffer4.read(cx).is_dirty());
4538 assert!(!buffer5.read(cx).is_dirty());
4539 });
4540
4541 // Rename and delete files and directories.
4542 tree.flush_fs_events(cx).await;
4543 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4544 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4545 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4546 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4547 tree.flush_fs_events(cx).await;
4548
4549 cx.update(|app| {
4550 assert_eq!(
4551 tree.read(app).paths().collect::<Vec<_>>(),
4552 vec![
4553 rel_path("a"),
4554 rel_path("a/file1"),
4555 rel_path("a/file2.new"),
4556 rel_path("b"),
4557 rel_path("d"),
4558 rel_path("d/file3"),
4559 rel_path("d/file4"),
4560 ]
4561 );
4562 });
4563
4564 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
4565 assert_eq!(id_for_path("d/file3", cx), file3_id);
4566 assert_eq!(id_for_path("d/file4", cx), file4_id);
4567
4568 cx.update(|cx| {
4569 assert_eq!(
4570 buffer2.read(cx).file().unwrap().path().as_ref(),
4571 rel_path("a/file2.new")
4572 );
4573 assert_eq!(
4574 buffer3.read(cx).file().unwrap().path().as_ref(),
4575 rel_path("d/file3")
4576 );
4577 assert_eq!(
4578 buffer4.read(cx).file().unwrap().path().as_ref(),
4579 rel_path("d/file4")
4580 );
4581 assert_eq!(
4582 buffer5.read(cx).file().unwrap().path().as_ref(),
4583 rel_path("b/c/file5")
4584 );
4585
4586 assert_matches!(
4587 buffer2.read(cx).file().unwrap().disk_state(),
4588 DiskState::Present { .. }
4589 );
4590 assert_matches!(
4591 buffer3.read(cx).file().unwrap().disk_state(),
4592 DiskState::Present { .. }
4593 );
4594 assert_matches!(
4595 buffer4.read(cx).file().unwrap().disk_state(),
4596 DiskState::Present { .. }
4597 );
4598 assert_eq!(
4599 buffer5.read(cx).file().unwrap().disk_state(),
4600 DiskState::Deleted
4601 );
4602 });
4603
4604 // Update the remote worktree. Check that it becomes consistent with the
4605 // local worktree.
4606 cx.executor().run_until_parked();
4607
4608 remote.update(cx, |remote, _| {
4609 for update in updates.lock().drain(..) {
4610 remote.as_remote_mut().unwrap().update_from_remote(update);
4611 }
4612 });
4613 cx.executor().run_until_parked();
4614 remote.update(cx, |remote, _| {
4615 assert_eq!(
4616 remote.paths().collect::<Vec<_>>(),
4617 vec![
4618 rel_path("a"),
4619 rel_path("a/file1"),
4620 rel_path("a/file2.new"),
4621 rel_path("b"),
4622 rel_path("d"),
4623 rel_path("d/file3"),
4624 rel_path("d/file4"),
4625 ]
4626 );
4627 });
4628}
4629
4630#[gpui::test(iterations = 10)]
4631async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4632 init_test(cx);
4633
4634 let fs = FakeFs::new(cx.executor());
4635 fs.insert_tree(
4636 path!("/dir"),
4637 json!({
4638 "a": {
4639 "file1": "",
4640 }
4641 }),
4642 )
4643 .await;
4644
4645 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4646 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4647 let tree_id = tree.update(cx, |tree, _| tree.id());
4648
4649 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4650 project.update(cx, |project, cx| {
4651 let tree = project.worktrees(cx).next().unwrap();
4652 tree.read(cx)
4653 .entry_for_path(rel_path(path))
4654 .unwrap_or_else(|| panic!("no entry for path {}", path))
4655 .id
4656 })
4657 };
4658
4659 let dir_id = id_for_path("a", cx);
4660 let file_id = id_for_path("a/file1", cx);
4661 let buffer = project
4662 .update(cx, |p, cx| {
4663 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4664 })
4665 .await
4666 .unwrap();
4667 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4668
4669 project
4670 .update(cx, |project, cx| {
4671 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4672 })
4673 .unwrap()
4674 .await
4675 .into_included()
4676 .unwrap();
4677 cx.executor().run_until_parked();
4678
4679 assert_eq!(id_for_path("b", cx), dir_id);
4680 assert_eq!(id_for_path("b/file1", cx), file_id);
4681 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4682}
4683
4684#[gpui::test]
4685async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4686 init_test(cx);
4687
4688 let fs = FakeFs::new(cx.executor());
4689 fs.insert_tree(
4690 "/dir",
4691 json!({
4692 "a.txt": "a-contents",
4693 "b.txt": "b-contents",
4694 }),
4695 )
4696 .await;
4697
4698 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4699
4700 // Spawn multiple tasks to open paths, repeating some paths.
4701 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4702 (
4703 p.open_local_buffer("/dir/a.txt", cx),
4704 p.open_local_buffer("/dir/b.txt", cx),
4705 p.open_local_buffer("/dir/a.txt", cx),
4706 )
4707 });
4708
4709 let buffer_a_1 = buffer_a_1.await.unwrap();
4710 let buffer_a_2 = buffer_a_2.await.unwrap();
4711 let buffer_b = buffer_b.await.unwrap();
4712 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4713 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4714
4715 // There is only one buffer per path.
4716 let buffer_a_id = buffer_a_1.entity_id();
4717 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4718
4719 // Open the same path again while it is still open.
4720 drop(buffer_a_1);
4721 let buffer_a_3 = project
4722 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4723 .await
4724 .unwrap();
4725
4726 // There's still only one buffer per path.
4727 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4728}
4729
4730#[gpui::test]
4731async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4732 init_test(cx);
4733
4734 let fs = FakeFs::new(cx.executor());
4735 fs.insert_tree(
4736 path!("/dir"),
4737 json!({
4738 "file1": "abc",
4739 "file2": "def",
4740 "file3": "ghi",
4741 }),
4742 )
4743 .await;
4744
4745 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4746
4747 let buffer1 = project
4748 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4749 .await
4750 .unwrap();
4751 let events = Arc::new(Mutex::new(Vec::new()));
4752
4753 // initially, the buffer isn't dirty.
4754 buffer1.update(cx, |buffer, cx| {
4755 cx.subscribe(&buffer1, {
4756 let events = events.clone();
4757 move |_, _, event, _| match event {
4758 BufferEvent::Operation { .. } => {}
4759 _ => events.lock().push(event.clone()),
4760 }
4761 })
4762 .detach();
4763
4764 assert!(!buffer.is_dirty());
4765 assert!(events.lock().is_empty());
4766
4767 buffer.edit([(1..2, "")], None, cx);
4768 });
4769
4770 // after the first edit, the buffer is dirty, and emits a dirtied event.
4771 buffer1.update(cx, |buffer, cx| {
4772 assert!(buffer.text() == "ac");
4773 assert!(buffer.is_dirty());
4774 assert_eq!(
4775 *events.lock(),
4776 &[
4777 language::BufferEvent::Edited,
4778 language::BufferEvent::DirtyChanged
4779 ]
4780 );
4781 events.lock().clear();
4782 buffer.did_save(
4783 buffer.version(),
4784 buffer.file().unwrap().disk_state().mtime(),
4785 cx,
4786 );
4787 });
4788
4789 // after saving, the buffer is not dirty, and emits a saved event.
4790 buffer1.update(cx, |buffer, cx| {
4791 assert!(!buffer.is_dirty());
4792 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4793 events.lock().clear();
4794
4795 buffer.edit([(1..1, "B")], None, cx);
4796 buffer.edit([(2..2, "D")], None, cx);
4797 });
4798
4799 // after editing again, the buffer is dirty, and emits another dirty event.
4800 buffer1.update(cx, |buffer, cx| {
4801 assert!(buffer.text() == "aBDc");
4802 assert!(buffer.is_dirty());
4803 assert_eq!(
4804 *events.lock(),
4805 &[
4806 language::BufferEvent::Edited,
4807 language::BufferEvent::DirtyChanged,
4808 language::BufferEvent::Edited,
4809 ],
4810 );
4811 events.lock().clear();
4812
4813 // After restoring the buffer to its previously-saved state,
4814 // the buffer is not considered dirty anymore.
4815 buffer.edit([(1..3, "")], None, cx);
4816 assert!(buffer.text() == "ac");
4817 assert!(!buffer.is_dirty());
4818 });
4819
4820 assert_eq!(
4821 *events.lock(),
4822 &[
4823 language::BufferEvent::Edited,
4824 language::BufferEvent::DirtyChanged
4825 ]
4826 );
4827
4828 // When a file is deleted, it is not considered dirty.
4829 let events = Arc::new(Mutex::new(Vec::new()));
4830 let buffer2 = project
4831 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4832 .await
4833 .unwrap();
4834 buffer2.update(cx, |_, cx| {
4835 cx.subscribe(&buffer2, {
4836 let events = events.clone();
4837 move |_, _, event, _| match event {
4838 BufferEvent::Operation { .. } => {}
4839 _ => events.lock().push(event.clone()),
4840 }
4841 })
4842 .detach();
4843 });
4844
4845 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4846 .await
4847 .unwrap();
4848 cx.executor().run_until_parked();
4849 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4850 assert_eq!(
4851 mem::take(&mut *events.lock()),
4852 &[language::BufferEvent::FileHandleChanged]
4853 );
4854
4855 // Buffer becomes dirty when edited.
4856 buffer2.update(cx, |buffer, cx| {
4857 buffer.edit([(2..3, "")], None, cx);
4858 assert_eq!(buffer.is_dirty(), true);
4859 });
4860 assert_eq!(
4861 mem::take(&mut *events.lock()),
4862 &[
4863 language::BufferEvent::Edited,
4864 language::BufferEvent::DirtyChanged
4865 ]
4866 );
4867
4868 // Buffer becomes clean again when all of its content is removed, because
4869 // the file was deleted.
4870 buffer2.update(cx, |buffer, cx| {
4871 buffer.edit([(0..2, "")], None, cx);
4872 assert_eq!(buffer.is_empty(), true);
4873 assert_eq!(buffer.is_dirty(), false);
4874 });
4875 assert_eq!(
4876 *events.lock(),
4877 &[
4878 language::BufferEvent::Edited,
4879 language::BufferEvent::DirtyChanged
4880 ]
4881 );
4882
4883 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4884 let events = Arc::new(Mutex::new(Vec::new()));
4885 let buffer3 = project
4886 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4887 .await
4888 .unwrap();
4889 buffer3.update(cx, |_, cx| {
4890 cx.subscribe(&buffer3, {
4891 let events = events.clone();
4892 move |_, _, event, _| match event {
4893 BufferEvent::Operation { .. } => {}
4894 _ => events.lock().push(event.clone()),
4895 }
4896 })
4897 .detach();
4898 });
4899
4900 buffer3.update(cx, |buffer, cx| {
4901 buffer.edit([(0..0, "x")], None, cx);
4902 });
4903 events.lock().clear();
4904 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4905 .await
4906 .unwrap();
4907 cx.executor().run_until_parked();
4908 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4909 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4910}
4911
/// Verifies how a buffer responds to its backing file changing on disk:
/// a clean buffer is reloaded in place (with anchors tracking their logical
/// positions through the change), while a dirty buffer keeps its local edits
/// and is flagged as conflicted instead of being reloaded.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers record character offsets; anchors are placed there so we
    // can check they survive the on-disk edit.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor should now resolve to the corresponding marked offset
        // in the new contents.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4994
4995#[gpui::test]
4996async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4997 init_test(cx);
4998
4999 let fs = FakeFs::new(cx.executor());
5000 fs.insert_tree(
5001 path!("/dir"),
5002 json!({
5003 "file1": "a\nb\nc\n",
5004 "file2": "one\r\ntwo\r\nthree\r\n",
5005 }),
5006 )
5007 .await;
5008
5009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5010 let buffer1 = project
5011 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5012 .await
5013 .unwrap();
5014 let buffer2 = project
5015 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5016 .await
5017 .unwrap();
5018
5019 buffer1.update(cx, |buffer, _| {
5020 assert_eq!(buffer.text(), "a\nb\nc\n");
5021 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5022 });
5023 buffer2.update(cx, |buffer, _| {
5024 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5025 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5026 });
5027
5028 // Change a file's line endings on disk from unix to windows. The buffer's
5029 // state updates correctly.
5030 fs.save(
5031 path!("/dir/file1").as_ref(),
5032 &"aaa\nb\nc\n".into(),
5033 LineEnding::Windows,
5034 )
5035 .await
5036 .unwrap();
5037 cx.executor().run_until_parked();
5038 buffer1.update(cx, |buffer, _| {
5039 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5040 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5041 });
5042
5043 // Save a file with windows line endings. The file is written correctly.
5044 buffer2.update(cx, |buffer, cx| {
5045 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5046 });
5047 project
5048 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5049 .await
5050 .unwrap();
5051 assert_eq!(
5052 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5053 "one\r\ntwo\r\nthree\r\nfour\r\n",
5054 );
5055}
5056
/// Verifies diagnostic grouping: a pushed LSP diagnostic plus the hint
/// diagnostics linked to it via `related_information` are assigned the same
/// `group_id`, with exactly one entry per group marked `is_primary`.
/// `diagnostics_in_range` returns all entries ordered by position, and
/// `diagnostic_group` returns a single group's entries.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic groups: "error 1" (warning + one hint) and "error 2"
    // (error + two hints). Primaries and hints cross-reference each other
    // through `related_information`, which is what drives the grouping.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Push the diagnostics into the store as if they came from server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by range; "error 2"'s group gets id 0 and
    // "error 1"'s group gets id 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5316
/// Verifies the LSP file-rename protocol flow: when a worktree entry is
/// renamed and the language server registered `willRename`/`didRename` file
/// operation filters matching the path, the project sends `WillRenameFiles`
/// (applying any workspace edit the server returns) and then notifies the
/// server with `DidRenameFiles`.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The server registers interest in *.rs files and in all folders, so the
    // rename below falls within its file-operation filters.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of one.rs -> three.rs; this should trigger the
    // WillRenameFiles request before the rename is applied.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the server will answer WillRenameFiles with.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive DidRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5452
/// Verifies symbol rename over LSP: `prepare_rename` resolves the range of
/// the symbol under the cursor, and `perform_rename` applies the server's
/// `WorkspaceEdit` across multiple buffers, returning a transaction entry
/// per edited buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server answers with
    // the symbol's range, which should round-trip back as offsets 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server returns edits spanning both files,
    // and the project must apply all of them.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // One transaction entry per edited buffer: one.rs and two.rs.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5592
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies project-wide text search, including that search results reflect
    // unsaved in-memory buffer edits rather than only on-disk file contents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // On-disk state: "TWO" occurs only in two.rs (its own definition) and in
    // three.rs (the `two::TWO` reference).
    // NOTE(review): the positional bools/defaults mirror SearchQuery::text's
    // signature (whole-word, case-sensitivity, ignored-files, include/exclude
    // matchers, ...) — confirm against its declaration before reordering.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory only (no save): both `one::ONE` and
    // `three::THREE` are replaced with `two::TWO`, introducing two new
    // occurrences of "TWO" that exist solely in the open buffer.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query must now also report the unsaved matches in four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5669
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the `files_to_include` PathMatcher argument of text search:
    // only files matching at least one inclusion glob are searched, and
    // inclusion globs that match nothing are silently ignored alongside
    // globs that do match.
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files; every file contains the query word.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // An inclusion glob that matches no files yields no results at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A non-matching glob (*.odd) mixed in with a matching one (*.ts) must
    // not affect the result set.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs union their results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5793
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the `files_to_exclude` PathMatcher argument of text search:
    // files matching any exclusion glob are dropped from results, and
    // exclusion globs that match nothing have no effect.
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files; every file contains the query word.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // A non-matching exclusion glob leaves the result set untouched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A non-matching glob (*.odd) mixed in with a matching one (*.ts) must
    // not affect the result set.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension present leaves nothing to search.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5917
5918#[gpui::test]
5919async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5920 init_test(cx);
5921
5922 let search_query = "file";
5923
5924 let fs = FakeFs::new(cx.executor());
5925 fs.insert_tree(
5926 path!("/dir"),
5927 json!({
5928 "one.rs": r#"// Rust file one"#,
5929 "one.ts": r#"// TypeScript file one"#,
5930 "two.rs": r#"// Rust file two"#,
5931 "two.ts": r#"// TypeScript file two"#,
5932 }),
5933 )
5934 .await;
5935
5936 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5937 let path_style = PathStyle::local();
5938 let _buffer = project.update(cx, |project, cx| {
5939 project.create_local_buffer("file", None, false, cx)
5940 });
5941
5942 assert_eq!(
5943 search(
5944 &project,
5945 SearchQuery::text(
5946 search_query,
5947 false,
5948 true,
5949 false,
5950 Default::default(),
5951 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5952 false,
5953 None,
5954 )
5955 .unwrap(),
5956 cx
5957 )
5958 .await
5959 .unwrap(),
5960 HashMap::from_iter([
5961 (path!("dir/one.rs").to_string(), vec![8..12]),
5962 (path!("dir/one.ts").to_string(), vec![14..18]),
5963 (path!("dir/two.rs").to_string(), vec![8..12]),
5964 (path!("dir/two.ts").to_string(), vec![14..18]),
5965 ]),
5966 "If no exclusions match, all files should be returned"
5967 );
5968
5969 assert_eq!(
5970 search(
5971 &project,
5972 SearchQuery::text(
5973 search_query,
5974 false,
5975 true,
5976 false,
5977 Default::default(),
5978 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5979 false,
5980 None,
5981 )
5982 .unwrap(),
5983 cx
5984 )
5985 .await
5986 .unwrap(),
5987 HashMap::from_iter([
5988 (path!("dir/one.ts").to_string(), vec![14..18]),
5989 (path!("dir/two.ts").to_string(), vec![14..18]),
5990 ]),
5991 "Rust exclusion search should give only TypeScript files"
5992 );
5993
5994 assert_eq!(
5995 search(
5996 &project,
5997 SearchQuery::text(
5998 search_query,
5999 false,
6000 true,
6001 false,
6002 Default::default(),
6003 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6004 false,
6005 None,
6006 )
6007 .unwrap(),
6008 cx
6009 )
6010 .await
6011 .unwrap(),
6012 HashMap::from_iter([
6013 (path!("dir/one.rs").to_string(), vec![8..12]),
6014 (path!("dir/two.rs").to_string(), vec![8..12]),
6015 ]),
6016 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6017 );
6018
6019 assert!(
6020 search(
6021 &project,
6022 SearchQuery::text(
6023 search_query,
6024 false,
6025 true,
6026 false,
6027 Default::default(),
6028 PathMatcher::new(
6029 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6030 PathStyle::local(),
6031 )
6032 .unwrap(),
6033 false,
6034 None,
6035 )
6036 .unwrap(),
6037 cx
6038 )
6039 .await
6040 .unwrap()
6041 .is_empty(),
6042 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6043 );
6044}
6045
6046#[gpui::test]
6047async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6048 init_test(cx);
6049
6050 let search_query = "file";
6051
6052 let fs = FakeFs::new(cx.executor());
6053 fs.insert_tree(
6054 path!("/dir"),
6055 json!({
6056 "one.rs": r#"// Rust file one"#,
6057 "one.ts": r#"// TypeScript file one"#,
6058 "two.rs": r#"// Rust file two"#,
6059 "two.ts": r#"// TypeScript file two"#,
6060 }),
6061 )
6062 .await;
6063 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6064 assert!(
6065 search(
6066 &project,
6067 SearchQuery::text(
6068 search_query,
6069 false,
6070 true,
6071 false,
6072 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6073 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6074 false,
6075 None,
6076 )
6077 .unwrap(),
6078 cx
6079 )
6080 .await
6081 .unwrap()
6082 .is_empty(),
6083 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6084 );
6085
6086 assert!(
6087 search(
6088 &project,
6089 SearchQuery::text(
6090 search_query,
6091 false,
6092 true,
6093 false,
6094 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6095 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6096 false,
6097 None,
6098 )
6099 .unwrap(),
6100 cx
6101 )
6102 .await
6103 .unwrap()
6104 .is_empty(),
6105 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6106 );
6107
6108 assert!(
6109 search(
6110 &project,
6111 SearchQuery::text(
6112 search_query,
6113 false,
6114 true,
6115 false,
6116 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6117 .unwrap(),
6118 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6119 .unwrap(),
6120 false,
6121 None,
6122 )
6123 .unwrap(),
6124 cx
6125 )
6126 .await
6127 .unwrap()
6128 .is_empty(),
6129 "Non-matching inclusions and exclusions should not change that."
6130 );
6131
6132 assert_eq!(
6133 search(
6134 &project,
6135 SearchQuery::text(
6136 search_query,
6137 false,
6138 true,
6139 false,
6140 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6141 .unwrap(),
6142 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6143 .unwrap(),
6144 false,
6145 None,
6146 )
6147 .unwrap(),
6148 cx
6149 )
6150 .await
6151 .unwrap(),
6152 HashMap::from_iter([
6153 (path!("dir/one.ts").to_string(), vec![14..18]),
6154 (path!("dir/two.ts").to_string(), vec![14..18]),
6155 ]),
6156 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6157 );
6158}
6159
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies inclusion globs against a project with two worktrees: a glob
    // prefixed with a worktree name restricts results to that worktree (when
    // the worktree-relative-path flag is passed as `true` below), while a
    // bare extension glob matches across all worktrees.
    init_test(cx);

    // Identical file layouts in two separate worktrees.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): the 7th argument is `true` only in the two
    // worktree-prefixed queries below — presumably it makes the matcher see
    // worktree-qualified paths; confirm against SearchQuery::text's signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A plain extension glob is not worktree-scoped and matches both trees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6258
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the include-ignored flag (the 4th argument to
    // SearchQuery::text below): gitignored directories are skipped by
    // default, fully searched when the flag is set, and still subject to
    // include/exclude matchers when searched.
    init_test(cx);

    // A repo where `target/` and `node_modules/` are gitignored; the query
    // word "key" appears in both ignored and non-ignored files.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search (include-ignored = false): ignored dirs are skipped.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project so the previous scan's state doesn't carry over.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // include-ignored = true: every match is found, ignored or not.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // include-ignored combined with include/exclude matchers: restrict to the
    // ignored prettier directory, but drop its .ts file via the exclusion.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6383
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Verifies text search over non-ASCII (Cyrillic) content. Match ranges
    // are byte offsets, so each 2-byte Cyrillic letter advances the range by
    // two. Also pins an implementation detail: a case-SENSITIVE plain query
    // stays a Text query, while a case-INSENSITIVE one is lowered to Regex.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive: only the lowercase occurrences match.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive: matches both cases; note the query becomes a Regex
    // variant internally.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A trailing '.' in the query must be treated literally, not as a regex
    // wildcard, even though the insensitive query is regex-backed: only
    // two.rs ("// ПРИВЕТ.") matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6466
6467#[gpui::test]
6468async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6469 init_test(cx);
6470
6471 let fs = FakeFs::new(cx.executor());
6472 fs.insert_tree(
6473 "/one/two",
6474 json!({
6475 "three": {
6476 "a.txt": "",
6477 "four": {}
6478 },
6479 "c.rs": ""
6480 }),
6481 )
6482 .await;
6483
6484 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6485 project
6486 .update(cx, |project, cx| {
6487 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6488 project.create_entry((id, rel_path("b..")), true, cx)
6489 })
6490 .await
6491 .unwrap()
6492 .into_included()
6493 .unwrap();
6494
6495 assert_eq!(
6496 fs.paths(true),
6497 vec![
6498 PathBuf::from(path!("/")),
6499 PathBuf::from(path!("/one")),
6500 PathBuf::from(path!("/one/two")),
6501 PathBuf::from(path!("/one/two/c.rs")),
6502 PathBuf::from(path!("/one/two/three")),
6503 PathBuf::from(path!("/one/two/three/a.txt")),
6504 PathBuf::from(path!("/one/two/three/b..")),
6505 PathBuf::from(path!("/one/two/three/four")),
6506 ]
6507 );
6508}
6509
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Verifies hover aggregation across several language servers attached to
    // one buffer: servers advertising hover capability are all queried, a
    // server returning `None` contributes nothing, and a server WITHOUT the
    // hover capability is never asked at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: two that answer hovers, one
    // that answers with None, and one that lacks the hover capability.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // Deliberately no hover capability — this server must
                    // never receive a hover request.
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler per server, keyed by server name, and keep the
    // request streams so we can await that each expected request fires.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two return a real hover labelled with their own name.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Capable but returns no hover — must be queried, contributes
                // nothing to the final result.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // No hover capability: any hover request reaching it is a bug.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Trigger one hover, then wait until each capable server has seen it.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned Some(hover) contribute results.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6664
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover content blocks which are empty or whitespace-only
    // are filtered out, so a server answering with only blank MarkedStrings
    // yields no hover at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server responds with three "parts", all of which are blank: an
    // empty string, spaces, and newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Request the hover, make sure the server actually received the request,
    // then assert that no content survived filtering.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6738
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that requesting code actions with an explicit `kinds` filter
    // returns only actions of the requested kind, even when the server
    // offers actions of other kinds.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds: organize-imports and
    // fix-all. Only the former matches the filter below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request actions for the whole buffer, filtered to organize-imports.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kind filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6817
6818#[gpui::test]
6819async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6820 init_test(cx);
6821
6822 let fs = FakeFs::new(cx.executor());
6823 fs.insert_tree(
6824 path!("/dir"),
6825 json!({
6826 "a.tsx": "a",
6827 }),
6828 )
6829 .await;
6830
6831 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6832
6833 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6834 language_registry.add(tsx_lang());
6835 let language_server_names = [
6836 "TypeScriptServer",
6837 "TailwindServer",
6838 "ESLintServer",
6839 "NoActionsCapabilitiesServer",
6840 ];
6841
6842 let mut language_server_rxs = [
6843 language_registry.register_fake_lsp(
6844 "tsx",
6845 FakeLspAdapter {
6846 name: language_server_names[0],
6847 capabilities: lsp::ServerCapabilities {
6848 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6849 ..lsp::ServerCapabilities::default()
6850 },
6851 ..FakeLspAdapter::default()
6852 },
6853 ),
6854 language_registry.register_fake_lsp(
6855 "tsx",
6856 FakeLspAdapter {
6857 name: language_server_names[1],
6858 capabilities: lsp::ServerCapabilities {
6859 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6860 ..lsp::ServerCapabilities::default()
6861 },
6862 ..FakeLspAdapter::default()
6863 },
6864 ),
6865 language_registry.register_fake_lsp(
6866 "tsx",
6867 FakeLspAdapter {
6868 name: language_server_names[2],
6869 capabilities: lsp::ServerCapabilities {
6870 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6871 ..lsp::ServerCapabilities::default()
6872 },
6873 ..FakeLspAdapter::default()
6874 },
6875 ),
6876 language_registry.register_fake_lsp(
6877 "tsx",
6878 FakeLspAdapter {
6879 name: language_server_names[3],
6880 capabilities: lsp::ServerCapabilities {
6881 code_action_provider: None,
6882 ..lsp::ServerCapabilities::default()
6883 },
6884 ..FakeLspAdapter::default()
6885 },
6886 ),
6887 ];
6888
6889 let (buffer, _handle) = project
6890 .update(cx, |p, cx| {
6891 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6892 })
6893 .await
6894 .unwrap();
6895 cx.executor().run_until_parked();
6896
6897 let mut servers_with_actions_requests = HashMap::default();
6898 for i in 0..language_server_names.len() {
6899 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6900 panic!(
6901 "Failed to get language server #{i} with name {}",
6902 &language_server_names[i]
6903 )
6904 });
6905 let new_server_name = new_server.server.name();
6906
6907 assert!(
6908 !servers_with_actions_requests.contains_key(&new_server_name),
6909 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6910 );
6911 match new_server_name.0.as_ref() {
6912 "TailwindServer" | "TypeScriptServer" => {
6913 servers_with_actions_requests.insert(
6914 new_server_name.clone(),
6915 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6916 move |_, _| {
6917 let name = new_server_name.clone();
6918 async move {
6919 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6920 lsp::CodeAction {
6921 title: format!("{name} code action"),
6922 ..lsp::CodeAction::default()
6923 },
6924 )]))
6925 }
6926 },
6927 ),
6928 );
6929 }
6930 "ESLintServer" => {
6931 servers_with_actions_requests.insert(
6932 new_server_name,
6933 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6934 |_, _| async move { Ok(None) },
6935 ),
6936 );
6937 }
6938 "NoActionsCapabilitiesServer" => {
6939 let _never_handled = new_server
6940 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6941 panic!(
6942 "Should not call for code actions server with no corresponding capabilities"
6943 )
6944 });
6945 }
6946 unexpected => panic!("Unexpected server name: {unexpected}"),
6947 }
6948 }
6949
6950 let code_actions_task = project.update(cx, |project, cx| {
6951 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6952 });
6953
6954 // cx.run_until_parked();
6955 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6956 |mut code_actions_request| async move {
6957 code_actions_request
6958 .next()
6959 .await
6960 .expect("All code actions requests should have been triggered")
6961 },
6962 ))
6963 .await;
6964 assert_eq!(
6965 vec!["TailwindServer code action", "TypeScriptServer code action"],
6966 code_actions_task
6967 .await
6968 .unwrap()
6969 .unwrap()
6970 .into_iter()
6971 .map(|code_action| code_action.lsp_action.title().to_owned())
6972 .sorted()
6973 .collect::<Vec<_>>(),
6974 "Should receive code actions responses from all related servers with hover capabilities"
6975 );
6976}
6977
6978#[gpui::test]
6979async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6980 init_test(cx);
6981
6982 let fs = FakeFs::new(cx.executor());
6983 fs.insert_tree(
6984 "/dir",
6985 json!({
6986 "a.rs": "let a = 1;",
6987 "b.rs": "let b = 2;",
6988 "c.rs": "let c = 2;",
6989 }),
6990 )
6991 .await;
6992
6993 let project = Project::test(
6994 fs,
6995 [
6996 "/dir/a.rs".as_ref(),
6997 "/dir/b.rs".as_ref(),
6998 "/dir/c.rs".as_ref(),
6999 ],
7000 cx,
7001 )
7002 .await;
7003
7004 // check the initial state and get the worktrees
7005 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7006 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7007 assert_eq!(worktrees.len(), 3);
7008
7009 let worktree_a = worktrees[0].read(cx);
7010 let worktree_b = worktrees[1].read(cx);
7011 let worktree_c = worktrees[2].read(cx);
7012
7013 // check they start in the right order
7014 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7015 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7016 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7017
7018 (
7019 worktrees[0].clone(),
7020 worktrees[1].clone(),
7021 worktrees[2].clone(),
7022 )
7023 });
7024
7025 // move first worktree to after the second
7026 // [a, b, c] -> [b, a, c]
7027 project
7028 .update(cx, |project, cx| {
7029 let first = worktree_a.read(cx);
7030 let second = worktree_b.read(cx);
7031 project.move_worktree(first.id(), second.id(), cx)
7032 })
7033 .expect("moving first after second");
7034
7035 // check the state after moving
7036 project.update(cx, |project, cx| {
7037 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7038 assert_eq!(worktrees.len(), 3);
7039
7040 let first = worktrees[0].read(cx);
7041 let second = worktrees[1].read(cx);
7042 let third = worktrees[2].read(cx);
7043
7044 // check they are now in the right order
7045 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7046 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7047 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7048 });
7049
7050 // move the second worktree to before the first
7051 // [b, a, c] -> [a, b, c]
7052 project
7053 .update(cx, |project, cx| {
7054 let second = worktree_a.read(cx);
7055 let first = worktree_b.read(cx);
7056 project.move_worktree(first.id(), second.id(), cx)
7057 })
7058 .expect("moving second before first");
7059
7060 // check the state after moving
7061 project.update(cx, |project, cx| {
7062 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7063 assert_eq!(worktrees.len(), 3);
7064
7065 let first = worktrees[0].read(cx);
7066 let second = worktrees[1].read(cx);
7067 let third = worktrees[2].read(cx);
7068
7069 // check they are now in the right order
7070 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7071 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7072 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7073 });
7074
7075 // move the second worktree to after the third
7076 // [a, b, c] -> [a, c, b]
7077 project
7078 .update(cx, |project, cx| {
7079 let second = worktree_b.read(cx);
7080 let third = worktree_c.read(cx);
7081 project.move_worktree(second.id(), third.id(), cx)
7082 })
7083 .expect("moving second after third");
7084
7085 // check the state after moving
7086 project.update(cx, |project, cx| {
7087 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7088 assert_eq!(worktrees.len(), 3);
7089
7090 let first = worktrees[0].read(cx);
7091 let second = worktrees[1].read(cx);
7092 let third = worktrees[2].read(cx);
7093
7094 // check they are now in the right order
7095 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7096 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7097 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7098 });
7099
7100 // move the third worktree to before the second
7101 // [a, c, b] -> [a, b, c]
7102 project
7103 .update(cx, |project, cx| {
7104 let third = worktree_c.read(cx);
7105 let second = worktree_b.read(cx);
7106 project.move_worktree(third.id(), second.id(), cx)
7107 })
7108 .expect("moving third before second");
7109
7110 // check the state after moving
7111 project.update(cx, |project, cx| {
7112 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7113 assert_eq!(worktrees.len(), 3);
7114
7115 let first = worktrees[0].read(cx);
7116 let second = worktrees[1].read(cx);
7117 let third = worktrees[2].read(cx);
7118
7119 // check they are now in the right order
7120 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7121 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7122 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7123 });
7124
7125 // move the first worktree to after the third
7126 // [a, b, c] -> [b, c, a]
7127 project
7128 .update(cx, |project, cx| {
7129 let first = worktree_a.read(cx);
7130 let third = worktree_c.read(cx);
7131 project.move_worktree(first.id(), third.id(), cx)
7132 })
7133 .expect("moving first after third");
7134
7135 // check the state after moving
7136 project.update(cx, |project, cx| {
7137 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7138 assert_eq!(worktrees.len(), 3);
7139
7140 let first = worktrees[0].read(cx);
7141 let second = worktrees[1].read(cx);
7142 let third = worktrees[2].read(cx);
7143
7144 // check they are now in the right order
7145 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7146 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7147 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7148 });
7149
7150 // move the third worktree to before the first
7151 // [b, c, a] -> [a, b, c]
7152 project
7153 .update(cx, |project, cx| {
7154 let third = worktree_a.read(cx);
7155 let first = worktree_b.read(cx);
7156 project.move_worktree(third.id(), first.id(), cx)
7157 })
7158 .expect("moving third before first");
7159
7160 // check the state after moving
7161 project.update(cx, |project, cx| {
7162 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7163 assert_eq!(worktrees.len(), 3);
7164
7165 let first = worktrees[0].read(cx);
7166 let second = worktrees[1].read(cx);
7167 let third = worktrees[2].read(cx);
7168
7169 // check they are now in the right order
7170 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7171 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7172 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7173 });
7174}
7175
7176#[gpui::test]
7177async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7178 init_test(cx);
7179
7180 let staged_contents = r#"
7181 fn main() {
7182 println!("hello world");
7183 }
7184 "#
7185 .unindent();
7186 let file_contents = r#"
7187 // print goodbye
7188 fn main() {
7189 println!("goodbye world");
7190 }
7191 "#
7192 .unindent();
7193
7194 let fs = FakeFs::new(cx.background_executor.clone());
7195 fs.insert_tree(
7196 "/dir",
7197 json!({
7198 ".git": {},
7199 "src": {
7200 "main.rs": file_contents,
7201 }
7202 }),
7203 )
7204 .await;
7205
7206 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7207
7208 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7209
7210 let buffer = project
7211 .update(cx, |project, cx| {
7212 project.open_local_buffer("/dir/src/main.rs", cx)
7213 })
7214 .await
7215 .unwrap();
7216 let unstaged_diff = project
7217 .update(cx, |project, cx| {
7218 project.open_unstaged_diff(buffer.clone(), cx)
7219 })
7220 .await
7221 .unwrap();
7222
7223 cx.run_until_parked();
7224 unstaged_diff.update(cx, |unstaged_diff, cx| {
7225 let snapshot = buffer.read(cx).snapshot();
7226 assert_hunks(
7227 unstaged_diff.hunks(&snapshot, cx),
7228 &snapshot,
7229 &unstaged_diff.base_text_string().unwrap(),
7230 &[
7231 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7232 (
7233 2..3,
7234 " println!(\"hello world\");\n",
7235 " println!(\"goodbye world\");\n",
7236 DiffHunkStatus::modified_none(),
7237 ),
7238 ],
7239 );
7240 });
7241
7242 let staged_contents = r#"
7243 // print goodbye
7244 fn main() {
7245 }
7246 "#
7247 .unindent();
7248
7249 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7250
7251 cx.run_until_parked();
7252 unstaged_diff.update(cx, |unstaged_diff, cx| {
7253 let snapshot = buffer.read(cx).snapshot();
7254 assert_hunks(
7255 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7256 &snapshot,
7257 &unstaged_diff.base_text().text(),
7258 &[(
7259 2..3,
7260 "",
7261 " println!(\"goodbye world\");\n",
7262 DiffHunkStatus::added_none(),
7263 )],
7264 );
7265 });
7266}
7267
/// Tests `Project::open_uncommitted_diff`, which diffs a buffer against the
/// HEAD commit. A hunk's secondary status reflects whether that change is
/// also unstaged (i.e. differs from the index as well as from HEAD).
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of src/modification.rs: HEAD, index (staged), and
    // working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists only in HEAD and the index, never on disk, so it
    // shows up as a deleted file later in the test.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should have picked up the registered Rust language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line is unstaged (it differs from the index too), while the
    // println change is already staged, hence the differing secondary states.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file appears as one deleted hunk; the deletion is not yet
    // staged, so a secondary (unstaged) hunk is present.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the deletion hunk loses its secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7447
/// Covers staging and unstaging of individual hunks via
/// `stage_or_unstage_hunks`: the optimistic pending states shown before the
/// index write completes, the events emitted along the way, rollback when an
/// index write fails, and two staging operations in flight at once.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contents; the index starts out identical to HEAD.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: deletes "zero" and upcases "two" and "four", producing
    // three hunks (one deletion, two modifications).
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so we can assert on the emitted sequence.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    // (`SecondaryHunkRemovalPending` is the optimistic state shown until the
    // asynchronous index write lands.)
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7787
/// Like `test_staging_hunks`, but with FakeFs events paused so that new
/// staging operations are issued while FS notifications for earlier index
/// writes are still undelivered. Pending hunk states must survive the
/// delayed, incrementally flushed events.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index both start with this content.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: deletes "zero" and upcases "two" and "four", producing
    // three hunks (one deletion, two modifications).
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    // Its status becomes pending and stays that way while events are paused.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7981
/// Randomized test: repeatedly stages or unstages random hunks — optionally
/// with diff recalculation deprioritized to provoke races with index writes —
/// then verifies every hunk's final secondary status once all IO settles.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Randomly stage or unstage hunks, mirroring the expected pending state
    // in our local `hunks` copy.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let background work (index writes, diff recalcs) interleave randomly.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After everything settles, each pending state resolves to its final form.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8104
8105#[gpui::test]
8106async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8107 init_test(cx);
8108
8109 let committed_contents = r#"
8110 fn main() {
8111 println!("hello from HEAD");
8112 }
8113 "#
8114 .unindent();
8115 let file_contents = r#"
8116 fn main() {
8117 println!("hello from the working copy");
8118 }
8119 "#
8120 .unindent();
8121
8122 let fs = FakeFs::new(cx.background_executor.clone());
8123 fs.insert_tree(
8124 "/dir",
8125 json!({
8126 ".git": {},
8127 "src": {
8128 "main.rs": file_contents,
8129 }
8130 }),
8131 )
8132 .await;
8133
8134 fs.set_head_for_repo(
8135 Path::new("/dir/.git"),
8136 &[("src/main.rs", committed_contents.clone())],
8137 "deadbeef",
8138 );
8139 fs.set_index_for_repo(
8140 Path::new("/dir/.git"),
8141 &[("src/main.rs", committed_contents.clone())],
8142 );
8143
8144 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8145
8146 let buffer = project
8147 .update(cx, |project, cx| {
8148 project.open_local_buffer("/dir/src/main.rs", cx)
8149 })
8150 .await
8151 .unwrap();
8152 let uncommitted_diff = project
8153 .update(cx, |project, cx| {
8154 project.open_uncommitted_diff(buffer.clone(), cx)
8155 })
8156 .await
8157 .unwrap();
8158
8159 cx.run_until_parked();
8160 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8161 let snapshot = buffer.read(cx).snapshot();
8162 assert_hunks(
8163 uncommitted_diff.hunks(&snapshot, cx),
8164 &snapshot,
8165 &uncommitted_diff.base_text_string().unwrap(),
8166 &[(
8167 1..2,
8168 " println!(\"hello from HEAD\");\n",
8169 " println!(\"hello from the working copy\");\n",
8170 DiffHunkStatus {
8171 kind: DiffHunkStatusKind::Modified,
8172 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8173 },
8174 )],
8175 );
8176 });
8177}
8178
// TODO: Should we test this on Windows also?
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    // Regression test: staging hunks must preserve the executable bit recorded
    // in the git index (mode 100755) rather than resetting it to 100644.
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with the executable bit set, then modify it on disk so
    // there is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the uncommitted diff.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // `git diff --staged` reports "new mode 100644" if staging clobbered the
    // executable bit; assert it did not.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry's mode directly via `git ls-files -s`.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8263
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Project paths should resolve to the *innermost* containing repository
    // (including a nested repository under `deps/`), and paths outside any
    // repository should resolve to `None`.
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair maps a worktree-relative path to the expected
        // (repository work directory, repo-relative path), or `None` when the
        // path is not inside any repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting `dir1/.git` should cause paths under `dir1` to stop resolving
    // to a repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8353
8354#[gpui::test]
8355async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8356 init_test(cx);
8357 let fs = FakeFs::new(cx.background_executor.clone());
8358 let home = paths::home_dir();
8359 fs.insert_tree(
8360 home,
8361 json!({
8362 ".git": {},
8363 "project": {
8364 "a.txt": "A"
8365 },
8366 }),
8367 )
8368 .await;
8369
8370 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8371 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8372 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8373
8374 project
8375 .update(cx, |project, cx| project.git_scans_complete(cx))
8376 .await;
8377 tree.flush_fs_events(cx).await;
8378
8379 project.read_with(cx, |project, cx| {
8380 let containing = project
8381 .git_store()
8382 .read(cx)
8383 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8384 assert!(containing.is_none());
8385 });
8386
8387 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8388 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8389 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8390 project
8391 .update(cx, |project, cx| project.git_scans_complete(cx))
8392 .await;
8393 tree.flush_fs_events(cx).await;
8394
8395 project.read_with(cx, |project, cx| {
8396 let containing = project
8397 .git_store()
8398 .read(cx)
8399 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8400 assert_eq!(
8401 containing
8402 .unwrap()
8403 .0
8404 .read(cx)
8405 .work_directory_abs_path
8406 .as_ref(),
8407 home,
8408 );
8409 });
8410}
8411
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check of cached git statuses against a real repository:
    // initial scan, working-copy edits, commits, and deletions.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously unchanged tracked file; it should now be reported
    // as modified in the worktree.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the outstanding changes, then delete one tracked (a.txt) and one
    // untracked (b.txt) file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8541
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    // Computed statuses are postprocessed: nested repository work directories
    // are excluded, and index + worktree changes for one path are combined
    // into a single entry (here a `DA` status for a.txt).
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (not the nested one under `sub`).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8605
8606#[track_caller]
8607/// We merge lhs into rhs.
8608fn merge_pending_ops_snapshots(
8609 source: Vec<pending_op::PendingOps>,
8610 mut target: Vec<pending_op::PendingOps>,
8611) -> Vec<pending_op::PendingOps> {
8612 for s_ops in source {
8613 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8614 if ops.repo_path == s_ops.repo_path {
8615 Some(idx)
8616 } else {
8617 None
8618 }
8619 }) {
8620 let t_ops = &mut target[idx];
8621 for s_op in s_ops.ops {
8622 if let Some(op_idx) = t_ops
8623 .ops
8624 .iter()
8625 .zip(0..)
8626 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8627 {
8628 let t_op = &mut t_ops.ops[op_idx];
8629 match (s_op.job_status, t_op.job_status) {
8630 (pending_op::JobStatus::Running, _) => {}
8631 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8632 (s_st, t_st) if s_st == t_st => {}
8633 _ => unreachable!(),
8634 }
8635 } else {
8636 t_ops.ops.push(s_op);
8637 }
8638 }
8639 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8640 } else {
8641 target.push(s_ops);
8642 }
8643 }
8644 target
8645}
8646
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Each stage/unstage request should record a pending op with a fresh,
    // monotonically increasing id that transitions Running -> Finished, and
    // the final cached status should reflect the last operation.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; each request gets a fresh id.
    let mut id = 1u16;

    // Stages (or unstages) `path`, asserting the op is Running while the task
    // is in flight and Finished once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging, ending in a staged state.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history should contain all five ops, in order, each
    // finished with the requested target status.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last operation was a stage, so a.txt ends up added in the index and
    // unmodified in the worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8807
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Issues two identical stage requests back to back. The recorded history
    // should show the first op as Skipped and the second as Finished
    // (presumably because the second request supersedes the first — the
    // assertion below pins that observable outcome).
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First request: fire and forget.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second, identical request: await it (with a timeout so a hang fails the
    // test rather than blocking forever).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file should end up staged: added in the index, unmodified on disk.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8913
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // `stage_all` / `unstage_all` should record pending ops per affected path.
    // Note the per-path histories below: a.txt (already staged individually)
    // gets no extra op from `stage_all`, so both files end with ids 1 and 2.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9042
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // A worktree rooted at a subfolder of a repository should still discover
    // the repository above it and report statuses for paths in the subfolder.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files under the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open the project two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository's work directory is the repo root, not the worktree root.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the repository's status should clear the cached entries too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9122
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // A conflicting cherry-pick should surface the conflicted path in
    // `merge_conflicts`, and resolving + committing should clear it.
    // NOTE: compiled out via `#[cfg(any())]` — see the flakiness TODO above.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create divergent edits to a.txt on two branches, then cherry-pick one
    // onto the other to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report a.txt as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9205
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Rewriting .gitignore at runtime should flip which entries are treated
    // as ignored, and a newly unignored file should pick up its index status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index contain .gitignore and a.xml; b.txt starts out ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    // (assert_entry_git_state's trailing args appear to be the expected index
    // status and ignored flag — confirm against its definition.)
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    // Now a.xml is ignored, and b.txt shows as newly added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9273
// NOTE:
// This test always fails on Windows because, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Verifies that renaming a repository's work directory on disk updates the
// repository's `work_directory_abs_path` while preserving per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses the real filesystem (TempTree + RealFs), so blocking is allowed.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, then dirty it; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Before the rename: modified `a`, untracked `b`, work dir at project1.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // After the rename: the work dir path follows the rename and statuses persist.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9355
// NOTE: This test always fails on Windows because, unlike on Unix, you can't
// rename a directory that some program already has open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Exercises a broad range of git status transitions against a real repository:
// untracked files, worktree modifications, commits, reset/stash/index removal,
// gitignore edits, and renames of directories containing untracked files.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem test (TempTree + RealFs), so parking is allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // `b.txt` and `f.txt` were never added, so both are untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files no longer carry a status entry.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // `a.txt` was stashed back to its committed contents: no status.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        // `b.txt` was removed from the index, so it is untracked again.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules, then commit the new gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new untracked file nested in a fresh directory tree.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the top-level directory; the untracked file's status should move
    // with it to the new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
9580
// Checks that filesystem churn confined to ignored directories (e.g. `target/`)
// produces no repository update events, while entries directly inside an
// already-loaded ignored directory still produce worktree entry updates.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem test (TempTree + RealFs), so parking is allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record every repository update and worktree entry change for later assertions.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // `fs-event-sentinel` is test-harness plumbing, not real data.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored dir forces its ancestors to be scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate build-tool churn inside the ignored directory: create a deps dir,
    // drop a temp file into it, then delete it all again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
9742
// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
// to different timings/ordering of events.
//
// Verifies that spurious FS events targeting ignored directories (as emitted by
// tools like flycheck) do not trigger repository or project entry updates.
#[ignore]
#[gpui::test]
async fn test_odd_events_for_ignored_dirs(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "**/target/",
            "src": {
                "main.rs": "fn main() {}",
            },
            "target": {
                "debug": {
                    "foo.txt": "foo",
                    "deps": {}
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "**/target/".into()),
            ("src/main.rs", "fn main() {}".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    // Record every repository update and worktree entry change for later assertions.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repository_updates = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repository_updates.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // `fs-event-sentinel` is test-harness plumbing, not real data.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Loading a file inside the ignored dir forces its ancestors to be scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("target/debug/foo.txt"), cx)
    })
    .await
    .unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("src"), false),
                (rel_path("src/main.rs"), false),
                (rel_path("target"), true),
                (rel_path("target/debug"), true),
                (rel_path("target/debug/deps"), true),
                (rel_path("target/debug/foo.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::MergeHeadsChanged,
            RepositoryEvent::BranchChanged,
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::StatusesChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("target".to_string(), PathChange::Loaded),
            ("target/debug".to_string(), PathChange::Loaded),
            ("target/debug/deps".to_string(), PathChange::Loaded),
            ("target/debug/foo.txt".to_string(), PathChange::Loaded),
        ],
        "All non-ignored entries and all opened firs should be getting a project event",
    );

    // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
    // This may happen multiple times during a single flycheck, but once is enough for testing.
    fs.emit_fs_event("/root/target/debug/deps", None);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        Vec::new(),
        "No further project events should happen, as only ignored dirs received FS events",
    );
}
9877
// Ensures repositories are only discovered for visible worktrees: adding an
// invisible (single-file) worktree must not pull in that file's ancestor
// repository.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the nested `dep1` repo as the visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repo at the visible worktree's root is discovered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add a non-visible, single-file worktree whose ancestor is the `dir1` repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: `/root/dir1/.git` is not picked up.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9939
// Verifies git/ignore state after rescans: files matched by a gitignore in an
// ancestor directory (outside the worktree root) are not treated as ignored,
// while files under a directory ignored by the repo's own gitignore are.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so every entry (including `.git`) is scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    // The worktree root is `/root/tree`, so `/root/.gitignore` is an ancestor ignore file.
    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's contents to be loaded so we can assert on them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        // Matched only by the ancestor gitignore, so not ignored here.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category and stage one of them.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Newly created and staged: shows as Added, not ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10080
// Verifies that linked git worktrees (`.git` file pointing into
// `.git/worktrees/...`) and submodules (`.git` file pointing into
// `.git/modules/...`) are each discovered as distinct repositories, and that
// git-state changes in them are picked up and reflected in file statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be found: main, linked worktree, submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // Disk says "B" while HEAD/index say "b", so the file reads as modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10236
// Verifies that two worktrees sharing the same ancestor `.git` directory
// resolve to a single deduplicated repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories, both inside the same repository.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository should be reported, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10283
// Verifies that saving a buffer under a new path (triggering the
// `BufferChangedFilePath` event) re-bases the buffer's unstaged and
// uncommitted diffs against the new file's staged/committed contents.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each file/stage so the assertions below can tell
    // exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so there is something to diff.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, _cx| {
        let base_text = unstaged_diff.base_text_string().unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string().unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.hunks(&snapshot, cx).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // The uncommitted diff should likewise be based on file_2's HEAD contents.
    uncommitted_diff.update(cx, |uncommitted_diff, _cx| {
        let base_text = uncommitted_diff.base_text_string().unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10397
10398async fn search(
10399 project: &Entity<Project>,
10400 query: SearchQuery,
10401 cx: &mut gpui::TestAppContext,
10402) -> Result<HashMap<String, Vec<Range<usize>>>> {
10403 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10404 let mut results = HashMap::default();
10405 while let Ok(search_result) = search_rx.recv().await {
10406 match search_result {
10407 SearchResult::Buffer { buffer, ranges } => {
10408 results.entry(buffer).or_insert(ranges);
10409 }
10410 SearchResult::LimitReached => {}
10411 }
10412 }
10413 Ok(results
10414 .into_iter()
10415 .map(|(buffer, ranges)| {
10416 buffer.update(cx, |buffer, cx| {
10417 let path = buffer
10418 .file()
10419 .unwrap()
10420 .full_path(cx)
10421 .to_string_lossy()
10422 .to_string();
10423 let ranges = ranges
10424 .into_iter()
10425 .map(|range| range.to_offset(buffer))
10426 .collect::<Vec<_>>();
10427 (path, ranges)
10428 })
10429 })
10430 .collect())
10431}
10432
10433pub fn init_test(cx: &mut gpui::TestAppContext) {
10434 zlog::init_test();
10435
10436 cx.update(|cx| {
10437 let settings_store = SettingsStore::test(cx);
10438 cx.set_global(settings_store);
10439 release_channel::init(semver::Version::new(0, 0, 0), cx);
10440 });
10441}
10442
10443fn json_lang() -> Arc<Language> {
10444 Arc::new(Language::new(
10445 LanguageConfig {
10446 name: "JSON".into(),
10447 matcher: LanguageMatcher {
10448 path_suffixes: vec!["json".to_string()],
10449 ..Default::default()
10450 },
10451 ..Default::default()
10452 },
10453 None,
10454 ))
10455}
10456
10457fn js_lang() -> Arc<Language> {
10458 Arc::new(Language::new(
10459 LanguageConfig {
10460 name: "JavaScript".into(),
10461 matcher: LanguageMatcher {
10462 path_suffixes: vec!["js".to_string()],
10463 ..Default::default()
10464 },
10465 ..Default::default()
10466 },
10467 None,
10468 ))
10469}
10470
10471fn rust_lang() -> Arc<Language> {
10472 Arc::new(Language::new(
10473 LanguageConfig {
10474 name: "Rust".into(),
10475 matcher: LanguageMatcher {
10476 path_suffixes: vec!["rs".to_string()],
10477 ..Default::default()
10478 },
10479 ..Default::default()
10480 },
10481 Some(tree_sitter_rust::LANGUAGE.into()),
10482 ))
10483}
10484
/// Builds a fake "Python" language for tests: matches `.py` files, has no
/// grammar, declares `pyproject.toml` as its manifest, and uses a toolchain
/// lister that discovers `.venv` directories on the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only toolchain lister backed by a `FakeFs`.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report one toolchain for every `.venv` directory found in the
            // ancestors of `subroot_relative_path` (checked against the FakeFs).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Toolchain resolution is unsupported in this fake lister.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchains.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
10554
10555fn typescript_lang() -> Arc<Language> {
10556 Arc::new(Language::new(
10557 LanguageConfig {
10558 name: "TypeScript".into(),
10559 matcher: LanguageMatcher {
10560 path_suffixes: vec!["ts".to_string()],
10561 ..Default::default()
10562 },
10563 ..Default::default()
10564 },
10565 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10566 ))
10567}
10568
10569fn tsx_lang() -> Arc<Language> {
10570 Arc::new(Language::new(
10571 LanguageConfig {
10572 name: "tsx".into(),
10573 matcher: LanguageMatcher {
10574 path_suffixes: vec!["tsx".to_string()],
10575 ..Default::default()
10576 },
10577 ..Default::default()
10578 },
10579 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10580 ))
10581}
10582
10583fn get_all_tasks(
10584 project: &Entity<Project>,
10585 task_contexts: Arc<TaskContexts>,
10586 cx: &mut App,
10587) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10588 let new_tasks = project.update(cx, |project, cx| {
10589 project.task_store.update(cx, |task_store, cx| {
10590 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10591 this.used_and_current_resolved_tasks(task_contexts, cx)
10592 })
10593 })
10594 });
10595
10596 cx.background_spawn(async move {
10597 let (mut old, new) = new_tasks.await;
10598 old.extend(new);
10599 old
10600 })
10601}
10602
10603#[track_caller]
10604fn assert_entry_git_state(
10605 tree: &Worktree,
10606 repository: &Repository,
10607 path: &str,
10608 index_status: Option<StatusCode>,
10609 is_ignored: bool,
10610) {
10611 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10612 let entry = tree
10613 .entry_for_path(&rel_path(path))
10614 .unwrap_or_else(|| panic!("entry {path} not found"));
10615 let status = repository
10616 .status_for_path(&repo_path(path))
10617 .map(|entry| entry.status);
10618 let expected = index_status.map(|index_status| {
10619 TrackedStatus {
10620 index_status,
10621 worktree_status: StatusCode::Unmodified,
10622 }
10623 .into()
10624 });
10625 assert_eq!(
10626 status, expected,
10627 "expected {path} to have git status: {expected:?}"
10628 );
10629 assert_eq!(
10630 entry.is_ignored, is_ignored,
10631 "expected {path} to have is_ignored: {is_ignored}"
10632 );
10633}
10634
10635#[track_caller]
10636fn git_init(path: &Path) -> git2::Repository {
10637 let mut init_opts = RepositoryInitOptions::new();
10638 init_opts.initial_head("main");
10639 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10640}
10641
10642#[track_caller]
10643fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10644 let path = path.as_ref();
10645 let mut index = repo.index().expect("Failed to get index");
10646 index.add_path(path).expect("Failed to add file");
10647 index.write().expect("Failed to write index");
10648}
10649
10650#[track_caller]
10651fn git_remove_index(path: &Path, repo: &git2::Repository) {
10652 let mut index = repo.index().expect("Failed to get index");
10653 index.remove_path(path).expect("Failed to add file");
10654 index.write().expect("Failed to write index");
10655}
10656
10657#[track_caller]
10658fn git_commit(msg: &'static str, repo: &git2::Repository) {
10659 use git2::Signature;
10660
10661 let signature = Signature::now("test", "test@zed.dev").unwrap();
10662 let oid = repo.index().unwrap().write_tree().unwrap();
10663 let tree = repo.find_tree(oid).unwrap();
10664 if let Ok(head) = repo.head() {
10665 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10666
10667 let parent_commit = parent_obj.as_commit().unwrap();
10668
10669 repo.commit(
10670 Some("HEAD"),
10671 &signature,
10672 &signature,
10673 msg,
10674 &tree,
10675 &[parent_commit],
10676 )
10677 .expect("Failed to commit with parent");
10678 } else {
10679 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10680 .expect("Failed to commit");
10681 }
10682}
10683
/// Cherry-picks `commit` onto the current HEAD. Compiled out via
/// `#[cfg(any())]` (an always-false cfg), so this helper is currently unused.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
10689
10690#[track_caller]
10691fn git_stash(repo: &mut git2::Repository) {
10692 use git2::Signature;
10693
10694 let signature = Signature::now("test", "test@zed.dev").unwrap();
10695 repo.stash_save(&signature, "N/A", None)
10696 .expect("Failed to stash");
10697}
10698
10699#[track_caller]
10700fn git_reset(offset: usize, repo: &git2::Repository) {
10701 let head = repo.head().expect("Couldn't get repo head");
10702 let object = head.peel(git2::ObjectType::Commit).unwrap();
10703 let commit = object.as_commit().unwrap();
10704 let new_head = commit
10705 .parents()
10706 .inspect(|parnet| {
10707 parnet.message();
10708 })
10709 .nth(offset)
10710 .expect("Not enough history");
10711 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10712 .expect("Could not reset");
10713}
10714
/// Creates branch `name` pointing at the current HEAD commit. Compiled out
/// via `#[cfg(any())]` (an always-false cfg), so currently unused.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed panic message: this creates a branch (the old message said
    // "Failed to commit").
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10725
/// Points HEAD at `name` (a refname, e.g. "refs/heads/main") and checks out
/// the new head. Compiled out via `#[cfg(any())]` (an always-false cfg), so
/// currently unused.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10732
/// Collects the repository's status entries into a path → status map.
/// Compiled out via `#[cfg(any())]` (an always-false cfg), so currently unused.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
10742
/// Verifies `Project::find_project_path` resolves absolute paths across
/// multiple worktrees, including nonexistent-but-inside-worktree paths, and
/// rejects paths outside every worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling project roots, so the project has two worktrees.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute path and id so expectations below can
    // be checked against the right worktree.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves with its worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to the second worktree.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even if no file exists there.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
10826
/// Verifies that removing worktrees updates the git store correctly: a
/// worktree nested inside a repository doesn't own that repository, and the
/// active repository falls over (and finally clears) as owning worktrees are
/// removed.
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two repositories: `/root/a` and `/root/b`; `/root/b/script` is a
    // separate worktree nested inside the `b` repository.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index the worktrees by absolute path so they can be removed by path below.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two repositories (`script` has no `.git`).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested `script` worktree must not drop the `b` repository.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree that owns the active repository should make the
    // remaining repository active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last repository-owning worktree clears the active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}