1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: a root `.editorconfig` (root = true) plus `.zed/settings.json`,
    // and a nested `b/.editorconfig` that partially overrides the root one.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk tree into a FakeFs so the project watches it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
248#[gpui::test]
249async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
250 init_test(cx);
251 cx.update(|cx| {
252 GitHostingProviderRegistry::default_global(cx);
253 git_hosting_providers::init(cx);
254 });
255
256 let fs = FakeFs::new(cx.executor());
257 let str_path = path!("/dir");
258 let path = Path::new(str_path);
259
260 fs.insert_tree(
261 path!("/dir"),
262 json!({
263 ".zed": {
264 "settings.json": r#"{
265 "git_hosting_providers": [
266 {
267 "provider": "gitlab",
268 "base_url": "https://google.com",
269 "name": "foo"
270 }
271 ]
272 }"#
273 },
274 }),
275 )
276 .await;
277
278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
279 let (_worktree, _) =
280 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
281 cx.executor().run_until_parked();
282
283 cx.update(|cx| {
284 let provider = GitHostingProviderRegistry::global(cx);
285 assert!(
286 provider
287 .list_hosting_providers()
288 .into_iter()
289 .any(|provider| provider.name() == "foo")
290 );
291 });
292
293 fs.atomic_write(
294 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
295 "{}".into(),
296 )
297 .await
298 .unwrap();
299
300 cx.run_until_parked();
301
302 cx.update(|cx| {
303 let provider = GitHostingProviderRegistry::global(cx);
304 assert!(
305 !provider
306 .list_hosting_providers()
307 .into_iter()
308 .any(|provider| provider.name() == "foo")
309 );
310 });
311}
312
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two levels of `.zed` directories: the worktree root defines `tab_size: 8`
    // and a "cargo check all" task; the nested `b/.zed` overrides the tab size
    // and contributes its own "cargo check" task.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: files under `b/` must see the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are picked up.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the topmost task as recently scheduled and register a global
    // tasks.json entry via the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global task sorts last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // A single worktree task whose command references $ZED_WORKTREE_ROOT, so it
    // can only resolve when a worktree context supplies that variable.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With no active worktree context there is no ZED_WORKTREE_ROOT to expand,
    // so no task resolves.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Supplying a worktree context with WorktreeRoot set lets the task resolve,
    // and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: walks up from the queried path looking for a
    // `pyproject.toml`, which roots Python subprojects within the worktree.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Check each ancestor (up to `depth`) for a pyproject.toml file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // One worktree containing two Python subprojects, each with its own
    // pyproject.toml and .venv directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at the subproject, not the worktree root.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b only.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
// Verifies how `lsp.<server>.binary.path` settings are resolved: a path that
// names an existing file inside the worktree is resolved relative to the
// worktree root, while a path that does not exist locally is passed through
// unchanged (so it can be found via the PATH environment variable).
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    // The worktree contains both the project settings and the on-disk file
    // that makes the first binary path resolve as worktree-relative.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    // Register two fake servers matching the names configured in settings.
    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The existing relative path was anchored at the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The missing path was left untouched for PATH-based lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
1294
1295#[gpui::test]
1296async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1297 init_test(cx);
1298
1299 let settings_json_contents = json!({
1300 "languages": {
1301 "Rust": {
1302 "language_servers": ["tilde_lsp"]
1303 }
1304 },
1305 "lsp": {
1306 "tilde_lsp": {
1307 "binary": {
1308 "path": "~/.local/bin/rust-analyzer",
1309 }
1310 }
1311 },
1312 });
1313
1314 let fs = FakeFs::new(cx.executor());
1315 fs.insert_tree(
1316 path!("/root"),
1317 json!({
1318 ".zed": {
1319 "settings.json": settings_json_contents.to_string(),
1320 },
1321 "src": {
1322 "main.rs": "fn main() {}",
1323 }
1324 }),
1325 )
1326 .await;
1327
1328 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1329 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1330 language_registry.add(rust_lang());
1331
1332 let mut tilde_lsp = language_registry.register_fake_lsp(
1333 "Rust",
1334 FakeLspAdapter {
1335 name: "tilde_lsp",
1336 ..Default::default()
1337 },
1338 );
1339 cx.run_until_parked();
1340
1341 project
1342 .update(cx, |project, cx| {
1343 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1344 })
1345 .await
1346 .unwrap();
1347
1348 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1349 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1350 assert_eq!(
1351 lsp_path, expected_path,
1352 "Tilde path should expand to home directory"
1353 );
1354}
1355
1356#[gpui::test]
1357async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1358 init_test(cx);
1359
1360 let fs = FakeFs::new(cx.executor());
1361 fs.insert_tree(
1362 path!("/the-root"),
1363 json!({
1364 ".gitignore": "target\n",
1365 "Cargo.lock": "",
1366 "src": {
1367 "a.rs": "",
1368 "b.rs": "",
1369 },
1370 "target": {
1371 "x": {
1372 "out": {
1373 "x.rs": ""
1374 }
1375 },
1376 "y": {
1377 "out": {
1378 "y.rs": "",
1379 }
1380 },
1381 "z": {
1382 "out": {
1383 "z.rs": ""
1384 }
1385 }
1386 }
1387 }),
1388 )
1389 .await;
1390 fs.insert_tree(
1391 path!("/the-registry"),
1392 json!({
1393 "dep1": {
1394 "src": {
1395 "dep1.rs": "",
1396 }
1397 },
1398 "dep2": {
1399 "src": {
1400 "dep2.rs": "",
1401 }
1402 },
1403 }),
1404 )
1405 .await;
1406 fs.insert_tree(
1407 path!("/the/stdlib"),
1408 json!({
1409 "LICENSE": "",
1410 "src": {
1411 "string.rs": "",
1412 }
1413 }),
1414 )
1415 .await;
1416
1417 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1418 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1419 (project.languages().clone(), project.lsp_store())
1420 });
1421 language_registry.add(rust_lang());
1422 let mut fake_servers = language_registry.register_fake_lsp(
1423 "Rust",
1424 FakeLspAdapter {
1425 name: "the-language-server",
1426 ..Default::default()
1427 },
1428 );
1429
1430 cx.executor().run_until_parked();
1431
1432 // Start the language server by opening a buffer with a compatible file extension.
1433 project
1434 .update(cx, |project, cx| {
1435 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1436 })
1437 .await
1438 .unwrap();
1439
1440 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1441 project.update(cx, |project, cx| {
1442 let worktree = project.worktrees(cx).next().unwrap();
1443 assert_eq!(
1444 worktree
1445 .read(cx)
1446 .snapshot()
1447 .entries(true, 0)
1448 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1449 .collect::<Vec<_>>(),
1450 &[
1451 ("", false),
1452 (".gitignore", false),
1453 ("Cargo.lock", false),
1454 ("src", false),
1455 ("src/a.rs", false),
1456 ("src/b.rs", false),
1457 ("target", true),
1458 ]
1459 );
1460 });
1461
1462 let prev_read_dir_count = fs.read_dir_call_count();
1463
1464 let fake_server = fake_servers.next().await.unwrap();
1465 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1466 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1467 id
1468 });
1469
1470 // Simulate jumping to a definition in a dependency outside of the worktree.
1471 let _out_of_worktree_buffer = project
1472 .update(cx, |project, cx| {
1473 project.open_local_buffer_via_lsp(
1474 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1475 server_id,
1476 cx,
1477 )
1478 })
1479 .await
1480 .unwrap();
1481
1482 // Keep track of the FS events reported to the language server.
1483 let file_changes = Arc::new(Mutex::new(Vec::new()));
1484 fake_server
1485 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1486 registrations: vec![lsp::Registration {
1487 id: Default::default(),
1488 method: "workspace/didChangeWatchedFiles".to_string(),
1489 register_options: serde_json::to_value(
1490 lsp::DidChangeWatchedFilesRegistrationOptions {
1491 watchers: vec![
1492 lsp::FileSystemWatcher {
1493 glob_pattern: lsp::GlobPattern::String(
1494 path!("/the-root/Cargo.toml").to_string(),
1495 ),
1496 kind: None,
1497 },
1498 lsp::FileSystemWatcher {
1499 glob_pattern: lsp::GlobPattern::String(
1500 path!("/the-root/src/*.{rs,c}").to_string(),
1501 ),
1502 kind: None,
1503 },
1504 lsp::FileSystemWatcher {
1505 glob_pattern: lsp::GlobPattern::String(
1506 path!("/the-root/target/y/**/*.rs").to_string(),
1507 ),
1508 kind: None,
1509 },
1510 lsp::FileSystemWatcher {
1511 glob_pattern: lsp::GlobPattern::String(
1512 path!("/the/stdlib/src/**/*.rs").to_string(),
1513 ),
1514 kind: None,
1515 },
1516 lsp::FileSystemWatcher {
1517 glob_pattern: lsp::GlobPattern::String(
1518 path!("**/Cargo.lock").to_string(),
1519 ),
1520 kind: None,
1521 },
1522 ],
1523 },
1524 )
1525 .ok(),
1526 }],
1527 })
1528 .await
1529 .into_response()
1530 .unwrap();
1531 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1532 let file_changes = file_changes.clone();
1533 move |params, _| {
1534 let mut file_changes = file_changes.lock();
1535 file_changes.extend(params.changes);
1536 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1537 }
1538 });
1539
1540 cx.executor().run_until_parked();
1541 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1542 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1543
1544 let mut new_watched_paths = fs.watched_paths();
1545 new_watched_paths.retain(|path| {
1546 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1547 });
1548 assert_eq!(
1549 &new_watched_paths,
1550 &[
1551 Path::new(path!("/the-root")),
1552 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1553 Path::new(path!("/the/stdlib/src"))
1554 ]
1555 );
1556
1557 // Now the language server has asked us to watch an ignored directory path,
1558 // so we recursively load it.
1559 project.update(cx, |project, cx| {
1560 let worktree = project.visible_worktrees(cx).next().unwrap();
1561 assert_eq!(
1562 worktree
1563 .read(cx)
1564 .snapshot()
1565 .entries(true, 0)
1566 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1567 .collect::<Vec<_>>(),
1568 &[
1569 ("", false),
1570 (".gitignore", false),
1571 ("Cargo.lock", false),
1572 ("src", false),
1573 ("src/a.rs", false),
1574 ("src/b.rs", false),
1575 ("target", true),
1576 ("target/x", true),
1577 ("target/y", true),
1578 ("target/y/out", true),
1579 ("target/y/out/y.rs", true),
1580 ("target/z", true),
1581 ]
1582 );
1583 });
1584
1585 // Perform some file system mutations, two of which match the watched patterns,
1586 // and one of which does not.
1587 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1588 .await
1589 .unwrap();
1590 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1591 .await
1592 .unwrap();
1593 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1594 .await
1595 .unwrap();
1596 fs.create_file(
1597 path!("/the-root/target/x/out/x2.rs").as_ref(),
1598 Default::default(),
1599 )
1600 .await
1601 .unwrap();
1602 fs.create_file(
1603 path!("/the-root/target/y/out/y2.rs").as_ref(),
1604 Default::default(),
1605 )
1606 .await
1607 .unwrap();
1608 fs.save(
1609 path!("/the-root/Cargo.lock").as_ref(),
1610 &"".into(),
1611 Default::default(),
1612 )
1613 .await
1614 .unwrap();
1615 fs.save(
1616 path!("/the-stdlib/LICENSE").as_ref(),
1617 &"".into(),
1618 Default::default(),
1619 )
1620 .await
1621 .unwrap();
1622 fs.save(
1623 path!("/the/stdlib/src/string.rs").as_ref(),
1624 &"".into(),
1625 Default::default(),
1626 )
1627 .await
1628 .unwrap();
1629
1630 // The language server receives events for the FS mutations that match its watch patterns.
1631 cx.executor().run_until_parked();
1632 assert_eq!(
1633 &*file_changes.lock(),
1634 &[
1635 lsp::FileEvent {
1636 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1637 typ: lsp::FileChangeType::CHANGED,
1638 },
1639 lsp::FileEvent {
1640 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1641 typ: lsp::FileChangeType::DELETED,
1642 },
1643 lsp::FileEvent {
1644 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1645 typ: lsp::FileChangeType::CREATED,
1646 },
1647 lsp::FileEvent {
1648 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1649 typ: lsp::FileChangeType::CREATED,
1650 },
1651 lsp::FileEvent {
1652 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1653 typ: lsp::FileChangeType::CHANGED,
1654 },
1655 ]
1656 );
1657}
1658
1659#[gpui::test]
1660async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1661 init_test(cx);
1662
1663 let fs = FakeFs::new(cx.executor());
1664 fs.insert_tree(
1665 path!("/dir"),
1666 json!({
1667 "a.rs": "let a = 1;",
1668 "b.rs": "let b = 2;"
1669 }),
1670 )
1671 .await;
1672
1673 let project = Project::test(
1674 fs,
1675 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1676 cx,
1677 )
1678 .await;
1679 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1680
1681 let buffer_a = project
1682 .update(cx, |project, cx| {
1683 project.open_local_buffer(path!("/dir/a.rs"), cx)
1684 })
1685 .await
1686 .unwrap();
1687 let buffer_b = project
1688 .update(cx, |project, cx| {
1689 project.open_local_buffer(path!("/dir/b.rs"), cx)
1690 })
1691 .await
1692 .unwrap();
1693
1694 lsp_store.update(cx, |lsp_store, cx| {
1695 lsp_store
1696 .update_diagnostics(
1697 LanguageServerId(0),
1698 lsp::PublishDiagnosticsParams {
1699 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1700 version: None,
1701 diagnostics: vec![lsp::Diagnostic {
1702 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1703 severity: Some(lsp::DiagnosticSeverity::ERROR),
1704 message: "error 1".to_string(),
1705 ..Default::default()
1706 }],
1707 },
1708 None,
1709 DiagnosticSourceKind::Pushed,
1710 &[],
1711 cx,
1712 )
1713 .unwrap();
1714 lsp_store
1715 .update_diagnostics(
1716 LanguageServerId(0),
1717 lsp::PublishDiagnosticsParams {
1718 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1719 version: None,
1720 diagnostics: vec![lsp::Diagnostic {
1721 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1722 severity: Some(DiagnosticSeverity::WARNING),
1723 message: "error 2".to_string(),
1724 ..Default::default()
1725 }],
1726 },
1727 None,
1728 DiagnosticSourceKind::Pushed,
1729 &[],
1730 cx,
1731 )
1732 .unwrap();
1733 });
1734
1735 buffer_a.update(cx, |buffer, _| {
1736 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1737 assert_eq!(
1738 chunks
1739 .iter()
1740 .map(|(s, d)| (s.as_str(), *d))
1741 .collect::<Vec<_>>(),
1742 &[
1743 ("let ", None),
1744 ("a", Some(DiagnosticSeverity::ERROR)),
1745 (" = 1;", None),
1746 ]
1747 );
1748 });
1749 buffer_b.update(cx, |buffer, _| {
1750 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1751 assert_eq!(
1752 chunks
1753 .iter()
1754 .map(|(s, d)| (s.as_str(), *d))
1755 .collect::<Vec<_>>(),
1756 &[
1757 ("let ", None),
1758 ("b", Some(DiagnosticSeverity::WARNING)),
1759 (" = 2;", None),
1760 ]
1761 );
1762 });
1763}
1764
1765#[gpui::test]
1766async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1767 init_test(cx);
1768
1769 let fs = FakeFs::new(cx.executor());
1770 fs.insert_tree(
1771 path!("/root"),
1772 json!({
1773 "dir": {
1774 ".git": {
1775 "HEAD": "ref: refs/heads/main",
1776 },
1777 ".gitignore": "b.rs",
1778 "a.rs": "let a = 1;",
1779 "b.rs": "let b = 2;",
1780 },
1781 "other.rs": "let b = c;"
1782 }),
1783 )
1784 .await;
1785
1786 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1787 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1788 let (worktree, _) = project
1789 .update(cx, |project, cx| {
1790 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1791 })
1792 .await
1793 .unwrap();
1794 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1795
1796 let (worktree, _) = project
1797 .update(cx, |project, cx| {
1798 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1799 })
1800 .await
1801 .unwrap();
1802 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1803
1804 let server_id = LanguageServerId(0);
1805 lsp_store.update(cx, |lsp_store, cx| {
1806 lsp_store
1807 .update_diagnostics(
1808 server_id,
1809 lsp::PublishDiagnosticsParams {
1810 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1811 version: None,
1812 diagnostics: vec![lsp::Diagnostic {
1813 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1814 severity: Some(lsp::DiagnosticSeverity::ERROR),
1815 message: "unused variable 'b'".to_string(),
1816 ..Default::default()
1817 }],
1818 },
1819 None,
1820 DiagnosticSourceKind::Pushed,
1821 &[],
1822 cx,
1823 )
1824 .unwrap();
1825 lsp_store
1826 .update_diagnostics(
1827 server_id,
1828 lsp::PublishDiagnosticsParams {
1829 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1830 version: None,
1831 diagnostics: vec![lsp::Diagnostic {
1832 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1833 severity: Some(lsp::DiagnosticSeverity::ERROR),
1834 message: "unknown variable 'c'".to_string(),
1835 ..Default::default()
1836 }],
1837 },
1838 None,
1839 DiagnosticSourceKind::Pushed,
1840 &[],
1841 cx,
1842 )
1843 .unwrap();
1844 });
1845
1846 let main_ignored_buffer = project
1847 .update(cx, |project, cx| {
1848 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1849 })
1850 .await
1851 .unwrap();
1852 main_ignored_buffer.update(cx, |buffer, _| {
1853 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1854 assert_eq!(
1855 chunks
1856 .iter()
1857 .map(|(s, d)| (s.as_str(), *d))
1858 .collect::<Vec<_>>(),
1859 &[
1860 ("let ", None),
1861 ("b", Some(DiagnosticSeverity::ERROR)),
1862 (" = 2;", None),
1863 ],
1864 "Gigitnored buffers should still get in-buffer diagnostics",
1865 );
1866 });
1867 let other_buffer = project
1868 .update(cx, |project, cx| {
1869 project.open_buffer((other_worktree_id, rel_path("")), cx)
1870 })
1871 .await
1872 .unwrap();
1873 other_buffer.update(cx, |buffer, _| {
1874 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1875 assert_eq!(
1876 chunks
1877 .iter()
1878 .map(|(s, d)| (s.as_str(), *d))
1879 .collect::<Vec<_>>(),
1880 &[
1881 ("let b = ", None),
1882 ("c", Some(DiagnosticSeverity::ERROR)),
1883 (";", None),
1884 ],
1885 "Buffers from hidden projects should still get in-buffer diagnostics"
1886 );
1887 });
1888
1889 project.update(cx, |project, cx| {
1890 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1891 assert_eq!(
1892 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1893 vec![(
1894 ProjectPath {
1895 worktree_id: main_worktree_id,
1896 path: rel_path("b.rs").into(),
1897 },
1898 server_id,
1899 DiagnosticSummary {
1900 error_count: 1,
1901 warning_count: 0,
1902 }
1903 )]
1904 );
1905 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1906 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1907 });
1908}
1909
// Verifies that work-done progress reported under a server's
// `disk_based_diagnostics_progress_token` is surfaced as
// DiskBasedDiagnosticsStarted/Finished project events, that published
// diagnostics emit DiagnosticsUpdated, and that re-publishing identical
// empty diagnostics does not emit a second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    // Starting the server yields a LanguageServerAdded event.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress under the configured token maps to DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for a.rs yields a DiagnosticsUpdated event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress maps to DiskBasedDiagnosticsFinished.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is attached to the buffer once it is opened.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second, identical empty publish must not produce another event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2045
// Verifies that restarting a language server while its disk-based diagnostics
// are still in progress abandons the old server's unfinished work: the
// replacement server reports Started/Finished under its new server id, and
// ending its progress clears `language_servers_running_disk_based_diagnostics`
// even though the old server never ended its own progress.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The restart removes server 0 and adds server 1.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2145
2146#[gpui::test]
2147async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2148 init_test(cx);
2149
2150 let fs = FakeFs::new(cx.executor());
2151 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2152
2153 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2154
2155 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2156 language_registry.add(rust_lang());
2157 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2158
2159 let (buffer, _) = project
2160 .update(cx, |project, cx| {
2161 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2162 })
2163 .await
2164 .unwrap();
2165
2166 // Publish diagnostics
2167 let fake_server = fake_servers.next().await.unwrap();
2168 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2169 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2170 version: None,
2171 diagnostics: vec![lsp::Diagnostic {
2172 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2173 severity: Some(lsp::DiagnosticSeverity::ERROR),
2174 message: "the message".to_string(),
2175 ..Default::default()
2176 }],
2177 });
2178
2179 cx.executor().run_until_parked();
2180 buffer.update(cx, |buffer, _| {
2181 assert_eq!(
2182 buffer
2183 .snapshot()
2184 .diagnostics_in_range::<_, usize>(0..1, false)
2185 .map(|entry| entry.diagnostic.message.clone())
2186 .collect::<Vec<_>>(),
2187 ["the message".to_string()]
2188 );
2189 });
2190 project.update(cx, |project, cx| {
2191 assert_eq!(
2192 project.diagnostic_summary(false, cx),
2193 DiagnosticSummary {
2194 error_count: 1,
2195 warning_count: 0,
2196 }
2197 );
2198 });
2199
2200 project.update(cx, |project, cx| {
2201 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2202 });
2203
2204 // The diagnostics are cleared.
2205 cx.executor().run_until_parked();
2206 buffer.update(cx, |buffer, _| {
2207 assert_eq!(
2208 buffer
2209 .snapshot()
2210 .diagnostics_in_range::<_, usize>(0..1, false)
2211 .map(|entry| entry.diagnostic.message.clone())
2212 .collect::<Vec<_>>(),
2213 Vec::<String>::new(),
2214 );
2215 });
2216 project.update(cx, |project, cx| {
2217 assert_eq!(
2218 project.diagnostic_summary(false, cx),
2219 DiagnosticSummary {
2220 error_count: 0,
2221 warning_count: 0,
2222 }
2223 );
2224 });
2225}
2226
// Regression test: a server may publish diagnostics tagged with a buffer
// version the client never sent (e.g. stale state). This must not poison the
// document-version bookkeeping — after a server restart, the re-opened
// document should be announced at version 0, not at the bogus version.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    // Opening the buffer starts the fake server and sends didOpen.
    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000), // deliberately bogus: was never sent by the client
        diagnostics: Vec::new(),
    });
    // Let the bogus notification be processed before restarting.
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The restarted server re-opens the document; its version must start at 0.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
2266
// Verifies that cancelling language-server work for a buffer sends a
// `window/workDoneProgress/cancel` notification only for progress tokens the
// server marked as cancellable, carrying the correct token.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Non-cancellable work: must NOT receive a cancel notification below.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Cancellable work: this is the token we expect to be cancelled.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    // Ensure both progress notifications have been processed before cancelling.
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Exactly one cancel notification should arrive, for the cancellable token.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2331
// Verifies that the per-language `enable_language_server` setting starts and
// stops only the language server it names: disabling Rust stops the Rust
// server but not the JavaScript one, and re-enabling starts a fresh instance
// that re-opens the buffer.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Open one buffer per language so both servers get started.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // Awaiting `Exit` proves the Rust server was shut down in response.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance should start and re-open the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2449
// Verifies that diagnostics published against an *older* buffer version are
// transformed through the edits made since that version: positions move with
// insertions, overlapping diagnostics are highlighted correctly, and
// out-of-order (stale-version) publishes are mapped onto the current text.
// NOTE(review): group ids asserted below (1, 2, 3, 4, 5, 6) depend on the
// exact sequence of publishes in this test — do not reorder them.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // "disk" is treated as a disk-based diagnostic source, so the
            // entries below get `is_disk_based: true`.
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        // Diagnostics are tagged with the *pre-edit* version; their positions
        // must be shifted down by the two inserted newlines.
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Chunk iteration should reflect the shifted diagnostic ranges.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range query should clip chunks at the query boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // Warning range fully contains the error range above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider (warning) entry sorts first; the nested error follows.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where error and warning overlap, the more severe (error) wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2741
2742#[gpui::test]
2743async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2744 init_test(cx);
2745
2746 let text = concat!(
2747 "let one = ;\n", //
2748 "let two = \n",
2749 "let three = 3;\n",
2750 );
2751
2752 let fs = FakeFs::new(cx.executor());
2753 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2754
2755 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2756 let buffer = project
2757 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2758 .await
2759 .unwrap();
2760
2761 project.update(cx, |project, cx| {
2762 project.lsp_store.update(cx, |lsp_store, cx| {
2763 lsp_store
2764 .update_diagnostic_entries(
2765 LanguageServerId(0),
2766 PathBuf::from("/dir/a.rs"),
2767 None,
2768 None,
2769 vec![
2770 DiagnosticEntry {
2771 range: Unclipped(PointUtf16::new(0, 10))
2772 ..Unclipped(PointUtf16::new(0, 10)),
2773 diagnostic: Diagnostic {
2774 severity: DiagnosticSeverity::ERROR,
2775 message: "syntax error 1".to_string(),
2776 source_kind: DiagnosticSourceKind::Pushed,
2777 ..Diagnostic::default()
2778 },
2779 },
2780 DiagnosticEntry {
2781 range: Unclipped(PointUtf16::new(1, 10))
2782 ..Unclipped(PointUtf16::new(1, 10)),
2783 diagnostic: Diagnostic {
2784 severity: DiagnosticSeverity::ERROR,
2785 message: "syntax error 2".to_string(),
2786 source_kind: DiagnosticSourceKind::Pushed,
2787 ..Diagnostic::default()
2788 },
2789 },
2790 ],
2791 cx,
2792 )
2793 .unwrap();
2794 })
2795 });
2796
2797 // An empty range is extended forward to include the following character.
2798 // At the end of a line, an empty range is extended backward to include
2799 // the preceding character.
2800 buffer.update(cx, |buffer, _| {
2801 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2802 assert_eq!(
2803 chunks
2804 .iter()
2805 .map(|(s, d)| (s.as_str(), *d))
2806 .collect::<Vec<_>>(),
2807 &[
2808 ("let one = ", None),
2809 (";", Some(DiagnosticSeverity::ERROR)),
2810 ("\nlet two =", None),
2811 (" ", Some(DiagnosticSeverity::ERROR)),
2812 ("\nlet three = 3;\n", None)
2813 ]
2814 );
2815 });
2816}
2817
2818#[gpui::test]
2819async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2820 init_test(cx);
2821
2822 let fs = FakeFs::new(cx.executor());
2823 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2824 .await;
2825
2826 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2827 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2828
2829 lsp_store.update(cx, |lsp_store, cx| {
2830 lsp_store
2831 .update_diagnostic_entries(
2832 LanguageServerId(0),
2833 Path::new("/dir/a.rs").to_owned(),
2834 None,
2835 None,
2836 vec![DiagnosticEntry {
2837 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2838 diagnostic: Diagnostic {
2839 severity: DiagnosticSeverity::ERROR,
2840 is_primary: true,
2841 message: "syntax error a1".to_string(),
2842 source_kind: DiagnosticSourceKind::Pushed,
2843 ..Diagnostic::default()
2844 },
2845 }],
2846 cx,
2847 )
2848 .unwrap();
2849 lsp_store
2850 .update_diagnostic_entries(
2851 LanguageServerId(1),
2852 Path::new("/dir/a.rs").to_owned(),
2853 None,
2854 None,
2855 vec![DiagnosticEntry {
2856 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2857 diagnostic: Diagnostic {
2858 severity: DiagnosticSeverity::ERROR,
2859 is_primary: true,
2860 message: "syntax error b1".to_string(),
2861 source_kind: DiagnosticSourceKind::Pushed,
2862 ..Diagnostic::default()
2863 },
2864 }],
2865 cx,
2866 )
2867 .unwrap();
2868
2869 assert_eq!(
2870 lsp_store.diagnostic_summary(false, cx),
2871 DiagnosticSummary {
2872 error_count: 2,
2873 warning_count: 0,
2874 }
2875 );
2876 });
2877}
2878
// Verifies that edits the server computed against an *older* document version
// are translated through the buffer edits made since that version, so that
// applying them to the current text produces the intended result.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server "computed its edits" against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit positions below are in the coordinates of the OLD version
    // (`lsp_document_version`); edits_from_lsp must remap them.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the remapped edits must preserve the user's interleaved
    // comments while landing the server's changes in the right places.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3033
// Verifies that a huge rewrite-style diff from a server (as rust-analyzer
// sends for merge-imports) is minimized by `edits_from_lsp` into the small
// set of edits that actually change the text.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The delete+reinsert dance above should collapse into just two
        // minimal edits: rewrite the first import, drop the second line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3144
// Verifies that `edits_from_lsp` tolerates a spec-violating edit pair — an
// insertion at the same position *after* a replacement — and still applies
// the insertion before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement covering "Path".
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Zero-width insertion at the same start position.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The import line must land before the (unchanged) call.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3200
// Verifies that `edits_from_lsp` normalizes malformed server edits: inverted
// ranges (end before start) and ranges past the end of the document are
// clipped/reordered rather than rejected, and the result is still minimized.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0,4) precedes start (0,8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far beyond the last line of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same minimal pair of
        // edits produced in the well-formed merge-imports test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3307
3308fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3309 buffer: &Buffer,
3310 range: Range<T>,
3311) -> Vec<(String, Option<DiagnosticSeverity>)> {
3312 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3313 for chunk in buffer.snapshot().chunks(range, true) {
3314 if chunks
3315 .last()
3316 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3317 {
3318 chunks.last_mut().unwrap().0.push_str(chunk.text);
3319 } else {
3320 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3321 }
3322 }
3323 chunks
3324}
3325
// Verifies go-to-definition into a file outside the project: the target is
// loaded into an invisible worktree that is released (and disappears from the
// worktree list) once the definition handle is dropped. Also checks that no
// second language server is started for the target file.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was loaded into an extra, *invisible* worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3424
// Verifies that when an LSP completion item carries an explicit `text_edit`,
// the resulting completion takes both its new text and its replacement range
// from that edit, in preference to `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // A single empty TypeScript file on a fake filesystem.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server advertising completion support
    // triggered by ".".
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the fake language server.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request first; the handler installed below
    // services it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with a single item whose `text_edit` replaces the trailing three
    // characters ("fqn") with "textEditText".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The completion must use the edit's text/range, not "insertText" or
    // "labelText".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3507
// Exercises LSP 3.17 `CompletionList.itemDefaults.editRange`: items that omit
// their own `text_edit` should fall back to the list-level default range,
// taking their new text from `text_edit_text` when present, or from `label`
// otherwise.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake TypeScript server with completion support triggered by ".".
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Issue the request before installing the handler that answers it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // The default edit range covers the trailing "fqn"; the item supplies
        // only `text_edit_text`.
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the new text; the default range is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // Here the item has neither `text_edit` nor `text_edit_text`; the new
        // text must come from `label` (notably NOT from `insert_text`, which
        // the default-edit-range path ignores).
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3644
// Verifies completion fallback behavior when the server supplies neither a
// per-item `text_edit` nor a list-level default `edit_range`: the replacement
// range is derived from the word around the cursor, and the new text comes
// from `insert_text` (Test 1) or, failing that, from `label` (Test 2).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // NOTE(review): the trigger character here is ":" while the sibling
    // completion tests use "." — confirm this difference is intentional.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // New text comes from `insert_text`, and the range covers the word "fqn"
    // (the last three characters) preceding the cursor.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // Falls back to `label`; the range covers "cmp", the word before the
    // cursor (which is one character before the end of the text).
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3750
// Verifies that carriage returns in a completion's `insert_text` (both bare
// "\r" and "\r\n") are normalized to "\n" in the completion's new text.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's insert_text mixes a lone "\r" and a Windows "\r\n".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both styles of carriage return were collapsed to plain "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3818
// Exercises the full command-backed code action flow: the server returns an
// action without edits; resolving it yields only a command; executing that
// command makes the server push edits back via `workspace/applyEdit`; and the
// resulting project transaction contains those edits (and can be undone).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake server advertising resolvable code actions and an executable
    // command named "_the/command".
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3960
3961#[gpui::test]
3962async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
3963 init_test(cx);
3964 let fs = FakeFs::new(cx.background_executor.clone());
3965 let expected_contents = "content";
3966 fs.as_fake()
3967 .insert_tree(
3968 "/root",
3969 json!({
3970 "test.txt": expected_contents
3971 }),
3972 )
3973 .await;
3974
3975 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
3976
3977 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
3978 let worktree = project.worktrees(cx).next().unwrap();
3979 let entry_id = worktree
3980 .read(cx)
3981 .entry_for_path(rel_path("test.txt"))
3982 .unwrap()
3983 .id;
3984 (worktree, entry_id)
3985 });
3986 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
3987 let _result = project
3988 .update(cx, |project, cx| {
3989 project.rename_entry(
3990 entry_id,
3991 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
3992 cx,
3993 )
3994 })
3995 .await
3996 .unwrap();
3997 worktree.read_with(cx, |worktree, _| {
3998 assert!(
3999 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4000 "Old file should have been removed"
4001 );
4002 assert!(
4003 worktree
4004 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4005 .is_some(),
4006 "Whole directory hierarchy and the new file should have been created"
4007 );
4008 });
4009 assert_eq!(
4010 worktree
4011 .update(cx, |worktree, cx| {
4012 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
4013 })
4014 .await
4015 .unwrap()
4016 .text,
4017 expected_contents,
4018 "Moved file's contents should be preserved"
4019 );
4020
4021 let entry_id = worktree.read_with(cx, |worktree, _| {
4022 worktree
4023 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4024 .unwrap()
4025 .id
4026 });
4027
4028 let _result = project
4029 .update(cx, |project, cx| {
4030 project.rename_entry(
4031 entry_id,
4032 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
4033 cx,
4034 )
4035 })
4036 .await
4037 .unwrap();
4038 worktree.read_with(cx, |worktree, _| {
4039 assert!(
4040 worktree.entry_for_path(rel_path("test.txt")).is_none(),
4041 "First file should not reappear"
4042 );
4043 assert!(
4044 worktree
4045 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
4046 .is_none(),
4047 "Old file should have been removed"
4048 );
4049 assert!(
4050 worktree
4051 .entry_for_path(rel_path("dir1/dir2/test.txt"))
4052 .is_some(),
4053 "No error should have occurred after moving into existing directory"
4054 );
4055 });
4056 assert_eq!(
4057 worktree
4058 .update(cx, |worktree, cx| {
4059 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
4060 })
4061 .await
4062 .unwrap()
4063 .text,
4064 expected_contents,
4065 "Moved file's contents should be preserved"
4066 );
4067}
4068
4069#[gpui::test(iterations = 10)]
4070async fn test_save_file(cx: &mut gpui::TestAppContext) {
4071 init_test(cx);
4072
4073 let fs = FakeFs::new(cx.executor());
4074 fs.insert_tree(
4075 path!("/dir"),
4076 json!({
4077 "file1": "the old contents",
4078 }),
4079 )
4080 .await;
4081
4082 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4083 let buffer = project
4084 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4085 .await
4086 .unwrap();
4087 buffer.update(cx, |buffer, cx| {
4088 assert_eq!(buffer.text(), "the old contents");
4089 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4090 });
4091
4092 project
4093 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4094 .await
4095 .unwrap();
4096
4097 let new_text = fs
4098 .load(Path::new(path!("/dir/file1")))
4099 .await
4100 .unwrap()
4101 .replace("\r\n", "\n");
4102 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4103}
4104
// Regression test: an untitled buffer has no language servers, but saving it
// under a name with a recognized extension (file.rs) must start the matching
// server and send it a `textDocument/didOpen` for the newly-saved file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Empty worktree: no Rust files yet, so no server should start eagerly.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled (file-less) buffer: registering it with language servers is
    // a no-op because it has no language yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a ".rs" name assigns the Rust language and should spawn
    // the server.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is now served by the new language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4184
4185#[gpui::test(iterations = 30)]
4186async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4187 init_test(cx);
4188
4189 let fs = FakeFs::new(cx.executor());
4190 fs.insert_tree(
4191 path!("/dir"),
4192 json!({
4193 "file1": "the original contents",
4194 }),
4195 )
4196 .await;
4197
4198 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4199 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4200 let buffer = project
4201 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4202 .await
4203 .unwrap();
4204
4205 // Simulate buffer diffs being slow, so that they don't complete before
4206 // the next file change occurs.
4207 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4208
4209 // Change the buffer's file on disk, and then wait for the file change
4210 // to be detected by the worktree, so that the buffer starts reloading.
4211 fs.save(
4212 path!("/dir/file1").as_ref(),
4213 &"the first contents".into(),
4214 Default::default(),
4215 )
4216 .await
4217 .unwrap();
4218 worktree.next_event(cx).await;
4219
4220 // Change the buffer's file again. Depending on the random seed, the
4221 // previous file change may still be in progress.
4222 fs.save(
4223 path!("/dir/file1").as_ref(),
4224 &"the second contents".into(),
4225 Default::default(),
4226 )
4227 .await
4228 .unwrap();
4229 worktree.next_event(cx).await;
4230
4231 cx.executor().run_until_parked();
4232 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4233 buffer.read_with(cx, |buffer, _| {
4234 assert_eq!(buffer.text(), on_disk_text);
4235 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4236 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4237 });
4238}
4239
4240#[gpui::test(iterations = 30)]
4241async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4242 init_test(cx);
4243
4244 let fs = FakeFs::new(cx.executor());
4245 fs.insert_tree(
4246 path!("/dir"),
4247 json!({
4248 "file1": "the original contents",
4249 }),
4250 )
4251 .await;
4252
4253 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4254 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4255 let buffer = project
4256 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4257 .await
4258 .unwrap();
4259
4260 // Simulate buffer diffs being slow, so that they don't complete before
4261 // the next file change occurs.
4262 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4263
4264 // Change the buffer's file on disk, and then wait for the file change
4265 // to be detected by the worktree, so that the buffer starts reloading.
4266 fs.save(
4267 path!("/dir/file1").as_ref(),
4268 &"the first contents".into(),
4269 Default::default(),
4270 )
4271 .await
4272 .unwrap();
4273 worktree.next_event(cx).await;
4274
4275 cx.executor()
4276 .spawn(cx.executor().simulate_random_delay())
4277 .await;
4278
4279 // Perform a noop edit, causing the buffer's version to increase.
4280 buffer.update(cx, |buffer, cx| {
4281 buffer.edit([(0..0, " ")], None, cx);
4282 buffer.undo(cx);
4283 });
4284
4285 cx.executor().run_until_parked();
4286 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4287 buffer.read_with(cx, |buffer, _| {
4288 let buffer_text = buffer.text();
4289 if buffer_text == on_disk_text {
4290 assert!(
4291 !buffer.is_dirty() && !buffer.has_conflict(),
4292 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4293 );
4294 }
4295 // If the file change occurred while the buffer was processing the first
4296 // change, the buffer will be in a conflicting state.
4297 else {
4298 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4299 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4300 }
4301 });
4302}
4303
4304#[gpui::test]
4305async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4306 init_test(cx);
4307
4308 let fs = FakeFs::new(cx.executor());
4309 fs.insert_tree(
4310 path!("/dir"),
4311 json!({
4312 "file1": "the old contents",
4313 }),
4314 )
4315 .await;
4316
4317 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4318 let buffer = project
4319 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4320 .await
4321 .unwrap();
4322 buffer.update(cx, |buffer, cx| {
4323 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4324 });
4325
4326 project
4327 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4328 .await
4329 .unwrap();
4330
4331 let new_text = fs
4332 .load(Path::new(path!("/dir/file1")))
4333 .await
4334 .unwrap()
4335 .replace("\r\n", "\n");
4336 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4337}
4338
4339#[gpui::test]
4340async fn test_save_as(cx: &mut gpui::TestAppContext) {
4341 init_test(cx);
4342
4343 let fs = FakeFs::new(cx.executor());
4344 fs.insert_tree("/dir", json!({})).await;
4345
4346 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4347
4348 let languages = project.update(cx, |project, _| project.languages().clone());
4349 languages.add(rust_lang());
4350
4351 let buffer = project.update(cx, |project, cx| {
4352 project.create_local_buffer("", None, false, cx)
4353 });
4354 buffer.update(cx, |buffer, cx| {
4355 buffer.edit([(0..0, "abc")], None, cx);
4356 assert!(buffer.is_dirty());
4357 assert!(!buffer.has_conflict());
4358 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4359 });
4360 project
4361 .update(cx, |project, cx| {
4362 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4363 let path = ProjectPath {
4364 worktree_id,
4365 path: rel_path("file1.rs").into(),
4366 };
4367 project.save_buffer_as(buffer.clone(), path, cx)
4368 })
4369 .await
4370 .unwrap();
4371 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4372
4373 cx.executor().run_until_parked();
4374 buffer.update(cx, |buffer, cx| {
4375 assert_eq!(
4376 buffer.file().unwrap().full_path(cx),
4377 Path::new("dir/file1.rs")
4378 );
4379 assert!(!buffer.is_dirty());
4380 assert!(!buffer.has_conflict());
4381 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4382 });
4383
4384 let opened_buffer = project
4385 .update(cx, |project, cx| {
4386 project.open_local_buffer("/dir/file1.rs", cx)
4387 })
4388 .await
4389 .unwrap();
4390 assert_eq!(opened_buffer, buffer);
4391}
4392
4393#[gpui::test]
4394async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4395 init_test(cx);
4396
4397 let fs = FakeFs::new(cx.executor());
4398 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4399
4400 fs.insert_tree(
4401 path!("/dir"),
4402 json!({
4403 "data_a.txt": "data about a"
4404 }),
4405 )
4406 .await;
4407
4408 let buffer = project
4409 .update(cx, |project, cx| {
4410 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4411 })
4412 .await
4413 .unwrap();
4414
4415 buffer.update(cx, |buffer, cx| {
4416 buffer.edit([(11..12, "b")], None, cx);
4417 });
4418
4419 // Save buffer's contents as a new file and confirm that the buffer's now
4420 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4421 // file associated with the buffer has now been updated to `data_b.txt`
4422 project
4423 .update(cx, |project, cx| {
4424 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4425 let new_path = ProjectPath {
4426 worktree_id,
4427 path: rel_path("data_b.txt").into(),
4428 };
4429
4430 project.save_buffer_as(buffer.clone(), new_path, cx)
4431 })
4432 .await
4433 .unwrap();
4434
4435 buffer.update(cx, |buffer, cx| {
4436 assert_eq!(
4437 buffer.file().unwrap().full_path(cx),
4438 Path::new("dir/data_b.txt")
4439 )
4440 });
4441
4442 // Open the original `data_a.txt` file, confirming that its contents are
4443 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4444 let original_buffer = project
4445 .update(cx, |project, cx| {
4446 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4447 })
4448 .await
4449 .unwrap();
4450
4451 original_buffer.update(cx, |buffer, cx| {
4452 assert_eq!(buffer.text(), "data about a");
4453 assert_eq!(
4454 buffer.file().unwrap().full_path(cx),
4455 Path::new("dir/data_a.txt")
4456 )
4457 });
4458}
4459
// Exercises a worktree backed by the real filesystem: after renaming and
// deleting files/directories on disk, entry ids must remain stable, open
// buffers must track their files' new paths, and a remote replica fed the
// buffered update stream must converge to the same set of paths.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // NOTE(review): this trait is already imported at the top of the file.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS events arrive asynchronously on background threads, so the
    // deterministic executor must be allowed to park while waiting for them.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Looks up the worktree entry id for a worktree-relative path, panicking
    // if no entry exists.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Buffer every update the local worktree produces so it can be replayed
    // into the remote replica after the FS mutations below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects all of the renames and deletions.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including the parent-directory move.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers track their files' new paths; the deleted file keeps its
    // old path but reports a deleted disk state.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4627
4628#[gpui::test(iterations = 10)]
4629async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4630 init_test(cx);
4631
4632 let fs = FakeFs::new(cx.executor());
4633 fs.insert_tree(
4634 path!("/dir"),
4635 json!({
4636 "a": {
4637 "file1": "",
4638 }
4639 }),
4640 )
4641 .await;
4642
4643 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4644 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4645 let tree_id = tree.update(cx, |tree, _| tree.id());
4646
4647 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4648 project.update(cx, |project, cx| {
4649 let tree = project.worktrees(cx).next().unwrap();
4650 tree.read(cx)
4651 .entry_for_path(rel_path(path))
4652 .unwrap_or_else(|| panic!("no entry for path {}", path))
4653 .id
4654 })
4655 };
4656
4657 let dir_id = id_for_path("a", cx);
4658 let file_id = id_for_path("a/file1", cx);
4659 let buffer = project
4660 .update(cx, |p, cx| {
4661 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4662 })
4663 .await
4664 .unwrap();
4665 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4666
4667 project
4668 .update(cx, |project, cx| {
4669 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4670 })
4671 .unwrap()
4672 .await
4673 .into_included()
4674 .unwrap();
4675 cx.executor().run_until_parked();
4676
4677 assert_eq!(id_for_path("b", cx), dir_id);
4678 assert_eq!(id_for_path("b/file1", cx), file_id);
4679 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4680}
4681
4682#[gpui::test]
4683async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4684 init_test(cx);
4685
4686 let fs = FakeFs::new(cx.executor());
4687 fs.insert_tree(
4688 "/dir",
4689 json!({
4690 "a.txt": "a-contents",
4691 "b.txt": "b-contents",
4692 }),
4693 )
4694 .await;
4695
4696 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4697
4698 // Spawn multiple tasks to open paths, repeating some paths.
4699 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4700 (
4701 p.open_local_buffer("/dir/a.txt", cx),
4702 p.open_local_buffer("/dir/b.txt", cx),
4703 p.open_local_buffer("/dir/a.txt", cx),
4704 )
4705 });
4706
4707 let buffer_a_1 = buffer_a_1.await.unwrap();
4708 let buffer_a_2 = buffer_a_2.await.unwrap();
4709 let buffer_b = buffer_b.await.unwrap();
4710 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4711 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4712
4713 // There is only one buffer per path.
4714 let buffer_a_id = buffer_a_1.entity_id();
4715 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4716
4717 // Open the same path again while it is still open.
4718 drop(buffer_a_1);
4719 let buffer_a_3 = project
4720 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4721 .await
4722 .unwrap();
4723
4724 // There's still only one buffer per path.
4725 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4726}
4727
4728#[gpui::test]
4729async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4730 init_test(cx);
4731
4732 let fs = FakeFs::new(cx.executor());
4733 fs.insert_tree(
4734 path!("/dir"),
4735 json!({
4736 "file1": "abc",
4737 "file2": "def",
4738 "file3": "ghi",
4739 }),
4740 )
4741 .await;
4742
4743 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4744
4745 let buffer1 = project
4746 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4747 .await
4748 .unwrap();
4749 let events = Arc::new(Mutex::new(Vec::new()));
4750
4751 // initially, the buffer isn't dirty.
4752 buffer1.update(cx, |buffer, cx| {
4753 cx.subscribe(&buffer1, {
4754 let events = events.clone();
4755 move |_, _, event, _| match event {
4756 BufferEvent::Operation { .. } => {}
4757 _ => events.lock().push(event.clone()),
4758 }
4759 })
4760 .detach();
4761
4762 assert!(!buffer.is_dirty());
4763 assert!(events.lock().is_empty());
4764
4765 buffer.edit([(1..2, "")], None, cx);
4766 });
4767
4768 // after the first edit, the buffer is dirty, and emits a dirtied event.
4769 buffer1.update(cx, |buffer, cx| {
4770 assert!(buffer.text() == "ac");
4771 assert!(buffer.is_dirty());
4772 assert_eq!(
4773 *events.lock(),
4774 &[
4775 language::BufferEvent::Edited,
4776 language::BufferEvent::DirtyChanged
4777 ]
4778 );
4779 events.lock().clear();
4780 buffer.did_save(
4781 buffer.version(),
4782 buffer.file().unwrap().disk_state().mtime(),
4783 cx,
4784 );
4785 });
4786
4787 // after saving, the buffer is not dirty, and emits a saved event.
4788 buffer1.update(cx, |buffer, cx| {
4789 assert!(!buffer.is_dirty());
4790 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4791 events.lock().clear();
4792
4793 buffer.edit([(1..1, "B")], None, cx);
4794 buffer.edit([(2..2, "D")], None, cx);
4795 });
4796
4797 // after editing again, the buffer is dirty, and emits another dirty event.
4798 buffer1.update(cx, |buffer, cx| {
4799 assert!(buffer.text() == "aBDc");
4800 assert!(buffer.is_dirty());
4801 assert_eq!(
4802 *events.lock(),
4803 &[
4804 language::BufferEvent::Edited,
4805 language::BufferEvent::DirtyChanged,
4806 language::BufferEvent::Edited,
4807 ],
4808 );
4809 events.lock().clear();
4810
4811 // After restoring the buffer to its previously-saved state,
4812 // the buffer is not considered dirty anymore.
4813 buffer.edit([(1..3, "")], None, cx);
4814 assert!(buffer.text() == "ac");
4815 assert!(!buffer.is_dirty());
4816 });
4817
4818 assert_eq!(
4819 *events.lock(),
4820 &[
4821 language::BufferEvent::Edited,
4822 language::BufferEvent::DirtyChanged
4823 ]
4824 );
4825
4826 // When a file is deleted, it is not considered dirty.
4827 let events = Arc::new(Mutex::new(Vec::new()));
4828 let buffer2 = project
4829 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4830 .await
4831 .unwrap();
4832 buffer2.update(cx, |_, cx| {
4833 cx.subscribe(&buffer2, {
4834 let events = events.clone();
4835 move |_, _, event, _| match event {
4836 BufferEvent::Operation { .. } => {}
4837 _ => events.lock().push(event.clone()),
4838 }
4839 })
4840 .detach();
4841 });
4842
4843 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4844 .await
4845 .unwrap();
4846 cx.executor().run_until_parked();
4847 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4848 assert_eq!(
4849 mem::take(&mut *events.lock()),
4850 &[language::BufferEvent::FileHandleChanged]
4851 );
4852
4853 // Buffer becomes dirty when edited.
4854 buffer2.update(cx, |buffer, cx| {
4855 buffer.edit([(2..3, "")], None, cx);
4856 assert_eq!(buffer.is_dirty(), true);
4857 });
4858 assert_eq!(
4859 mem::take(&mut *events.lock()),
4860 &[
4861 language::BufferEvent::Edited,
4862 language::BufferEvent::DirtyChanged
4863 ]
4864 );
4865
4866 // Buffer becomes clean again when all of its content is removed, because
4867 // the file was deleted.
4868 buffer2.update(cx, |buffer, cx| {
4869 buffer.edit([(0..2, "")], None, cx);
4870 assert_eq!(buffer.is_empty(), true);
4871 assert_eq!(buffer.is_dirty(), false);
4872 });
4873 assert_eq!(
4874 *events.lock(),
4875 &[
4876 language::BufferEvent::Edited,
4877 language::BufferEvent::DirtyChanged
4878 ]
4879 );
4880
4881 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4882 let events = Arc::new(Mutex::new(Vec::new()));
4883 let buffer3 = project
4884 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4885 .await
4886 .unwrap();
4887 buffer3.update(cx, |_, cx| {
4888 cx.subscribe(&buffer3, {
4889 let events = events.clone();
4890 move |_, _, event, _| match event {
4891 BufferEvent::Operation { .. } => {}
4892 _ => events.lock().push(event.clone()),
4893 }
4894 })
4895 .detach();
4896 });
4897
4898 buffer3.update(cx, |buffer, cx| {
4899 buffer.edit([(0..0, "x")], None, cx);
4900 });
4901 events.lock().clear();
4902 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4903 .await
4904 .unwrap();
4905 cx.executor().run_until_parked();
4906 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4907 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4908}
4909
// Verifies how an open buffer responds to its backing file changing on disk:
// a clean buffer is reloaded in place (keeping anchors aligned via a diff of
// the old and new contents), while a dirty buffer keeps its edits and is
// marked as being in conflict.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers yield byte offsets into the unmarked text; anchors are
    // created at those offsets below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset of the initial contents.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Confirm the buffer starts clean and unconflicted, then change the file's
    // contents on disk behind its back.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors must have followed the diff to the corresponding marked
        // positions in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4992
4993#[gpui::test]
4994async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4995 init_test(cx);
4996
4997 let fs = FakeFs::new(cx.executor());
4998 fs.insert_tree(
4999 path!("/dir"),
5000 json!({
5001 "file1": "a\nb\nc\n",
5002 "file2": "one\r\ntwo\r\nthree\r\n",
5003 }),
5004 )
5005 .await;
5006
5007 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5008 let buffer1 = project
5009 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5010 .await
5011 .unwrap();
5012 let buffer2 = project
5013 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5014 .await
5015 .unwrap();
5016
5017 buffer1.update(cx, |buffer, _| {
5018 assert_eq!(buffer.text(), "a\nb\nc\n");
5019 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5020 });
5021 buffer2.update(cx, |buffer, _| {
5022 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5023 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5024 });
5025
5026 // Change a file's line endings on disk from unix to windows. The buffer's
5027 // state updates correctly.
5028 fs.save(
5029 path!("/dir/file1").as_ref(),
5030 &"aaa\nb\nc\n".into(),
5031 LineEnding::Windows,
5032 )
5033 .await
5034 .unwrap();
5035 cx.executor().run_until_parked();
5036 buffer1.update(cx, |buffer, _| {
5037 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5038 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5039 });
5040
5041 // Save a file with windows line endings. The file is written correctly.
5042 buffer2.update(cx, |buffer, cx| {
5043 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5044 });
5045 project
5046 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5047 .await
5048 .unwrap();
5049 assert_eq!(
5050 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5051 "one\r\ntwo\r\nthree\r\nfour\r\n",
5052 );
5053}
5054
// Verifies diagnostic grouping: pushed LSP diagnostics whose hint entries
// point back at a primary diagnostic via `related_information` (and vice
// versa) are collapsed into groups. Here group 0 ends up holding "error 2"
// plus its two hints, and group 1 holds "error 1" plus its single hint.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five raw LSP diagnostics: two primaries ("error 1" WARNING, "error 2"
    // ERROR) and three HINT entries, cross-linked through
    // `related_information` so the grouping logic can associate them.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the pushed diagnostics into the LSP store.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries are present, ordered by position, with group ids and
    // primary flags assigned per the related-information links above.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5314
// Verifies that renaming a file through the project drives the LSP file
// operation protocol: the server receives `workspace/willRenameFiles` (whose
// returned workspace edit is resolved by the client) followed by a
// `workspace/didRenameFiles` notification, per the file-operation filters
// the server registered.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in `*.rs` files and in all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of `one.rs` to `three.rs`.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the server will return from willRenameFiles; the
    // client is expected to resolve it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the willRename handler handed back, so we can assert
    // the request actually ran (OnceLock::set panics if called twice).
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe exactly the one.rs -> three.rs move.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5450
// Verifies symbol renaming via LSP: `prepare_rename` surfaces the range the
// server reports for the symbol under the cursor, and `perform_rename`
// applies the server's workspace edit across every affected buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support with prepare-rename, so `prepare_rename` below
    // goes through the server instead of any fallback.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server reports
    // the symbol's range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server returns edits spanning both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both buffers; check each one's resulting text.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5590
// Project-wide text search should surface matches both from files on disk and
// from unsaved, in-memory buffer edits.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // First search runs against on-disk contents only: "TWO" appears in
    // two.rs (its declaration) and three.rs (a reference).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory, without saving, so the buffer now contains two
    // new occurrences of "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The second search must reflect the unsaved edits made above.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5667
// Search restricted by inclusion globs (the fifth `SearchQuery::text`
// argument) should only return matches from files whose paths match at least
// one inclusion pattern; non-matching patterns are simply inert.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // "*.odd" matches nothing in the tree, so nothing should be searched.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5791
// Search restricted by exclusion globs (the sixth `SearchQuery::text`
// argument) should omit matches from files whose paths match any exclusion
// pattern; patterns matching nothing have no effect.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension present in the tree leaves nothing to search.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5915
5916#[gpui::test]
5917async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5918 init_test(cx);
5919
5920 let search_query = "file";
5921
5922 let fs = FakeFs::new(cx.executor());
5923 fs.insert_tree(
5924 path!("/dir"),
5925 json!({
5926 "one.rs": r#"// Rust file one"#,
5927 "one.ts": r#"// TypeScript file one"#,
5928 "two.rs": r#"// Rust file two"#,
5929 "two.ts": r#"// TypeScript file two"#,
5930 }),
5931 )
5932 .await;
5933
5934 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5935 let path_style = PathStyle::local();
5936 let _buffer = project.update(cx, |project, cx| {
5937 project.create_local_buffer("file", None, false, cx)
5938 });
5939
5940 assert_eq!(
5941 search(
5942 &project,
5943 SearchQuery::text(
5944 search_query,
5945 false,
5946 true,
5947 false,
5948 Default::default(),
5949 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5950 false,
5951 None,
5952 )
5953 .unwrap(),
5954 cx
5955 )
5956 .await
5957 .unwrap(),
5958 HashMap::from_iter([
5959 (path!("dir/one.rs").to_string(), vec![8..12]),
5960 (path!("dir/one.ts").to_string(), vec![14..18]),
5961 (path!("dir/two.rs").to_string(), vec![8..12]),
5962 (path!("dir/two.ts").to_string(), vec![14..18]),
5963 ]),
5964 "If no exclusions match, all files should be returned"
5965 );
5966
5967 assert_eq!(
5968 search(
5969 &project,
5970 SearchQuery::text(
5971 search_query,
5972 false,
5973 true,
5974 false,
5975 Default::default(),
5976 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5977 false,
5978 None,
5979 )
5980 .unwrap(),
5981 cx
5982 )
5983 .await
5984 .unwrap(),
5985 HashMap::from_iter([
5986 (path!("dir/one.ts").to_string(), vec![14..18]),
5987 (path!("dir/two.ts").to_string(), vec![14..18]),
5988 ]),
5989 "Rust exclusion search should give only TypeScript files"
5990 );
5991
5992 assert_eq!(
5993 search(
5994 &project,
5995 SearchQuery::text(
5996 search_query,
5997 false,
5998 true,
5999 false,
6000 Default::default(),
6001 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6002 false,
6003 None,
6004 )
6005 .unwrap(),
6006 cx
6007 )
6008 .await
6009 .unwrap(),
6010 HashMap::from_iter([
6011 (path!("dir/one.rs").to_string(), vec![8..12]),
6012 (path!("dir/two.rs").to_string(), vec![8..12]),
6013 ]),
6014 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6015 );
6016
6017 assert!(
6018 search(
6019 &project,
6020 SearchQuery::text(
6021 search_query,
6022 false,
6023 true,
6024 false,
6025 Default::default(),
6026 PathMatcher::new(
6027 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6028 PathStyle::local(),
6029 )
6030 .unwrap(),
6031 false,
6032 None,
6033 )
6034 .unwrap(),
6035 cx
6036 )
6037 .await
6038 .unwrap()
6039 .is_empty(),
6040 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6041 );
6042}
6043
// When a file matches both an inclusion and an exclusion pattern, the
// exclusion wins; disjoint inclusion/exclusion sets behave independently.
#[gpui::test]
async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both no exclusions and inclusions match, exclusions should win and return nothing"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
                PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Non-matching inclusions and exclusions should not change that."
    );

    // Inclusions and exclusions that do not overlap: include-TS plus
    // exclude-Rust yields exactly the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
    );
}
6157
// Inclusion patterns in a multi-worktree project: patterns prefixed with a
// worktree name should scope results to that worktree, while bare extension
// patterns apply across all worktrees.
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): the seventh `SearchQuery::text` argument is `true` for
    // the worktree-prefixed patterns and `false` for the bare "*.ts" pattern
    // below — presumably it controls matching against full worktree-rooted
    // paths; confirm against SearchQuery's definition.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6256
// Search interaction with .gitignore: by default ignored directories
// ("target", "node_modules") are skipped; setting the fourth
// `SearchQuery::text` argument to `true` searches them too, and inclusion/
// exclusion globs still apply on top of that.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is built before each query here —
    // presumably to avoid worktree scan state from the previous search
    // leaking into the next; confirm whether reuse would also work.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include only the ignored prettier directory, then exclude its TS file.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6381
// Searching for non-ASCII (Cyrillic) text. Match ranges are byte offsets:
// each Cyrillic letter is two bytes in UTF-8, so "привет" spans 12 bytes.
// Also checks which query variant is constructed: case-sensitive plain text
// stays a Text query, while case-insensitive unicode falls back to Regex.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Case-sensitive: only the lowercase occurrences match.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive unicode search is implemented via a regex query.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    // Case-insensitive: uppercase "ПРИВЕТ" occurrences match as well.
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Query with trailing punctuation: only two.rs contains "ПРИВЕТ." and
    // the match range extends one byte past the letters to cover the '.'.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6464
// Creating a worktree entry whose name ends in dots ("b..") should produce
// exactly that path on disk, not a normalized or rejected one.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is the nested "three" directory, not "/one/two".
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            // NOTE(review): the `true` flag presumably selects directory vs
            // file creation — confirm against `create_entry`'s signature.
            project.create_entry((id, rel_path("b..")), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // The full filesystem listing must now contain "three/b.." alongside the
    // pre-existing entries, all untouched.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );
}
6507
// Hover with four language servers attached to one buffer:
// - TypeScriptServer and TailwindServer advertise hover capability and
//   return content — both responses must appear in the result;
// - ESLintServer advertises hover capability but returns None — no entry;
// - NoHoverCapabilitiesServer lacks the capability and must never receive a
//   hover request at all (its handler panics if called).
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Register all four fake servers for the "tsx" language; only the last
    // one omits hover_provider.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a per-server hover handler, keyed by server name so we can
    // detect the same server initializing twice.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                // These two answer with real hover content.
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Has the capability but reports no hover information.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Must never be asked for hovers; panics if it is.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every capable server has actually received a hover request.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6662
// A hover whose contents are only empty or whitespace-only strings should be
// dropped entirely rather than rendered as blank hover blocks.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three "content" parts that are all effectively empty.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the request actually reached the fake server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6736
// Requesting code actions restricted to specific kinds: the server offers
// both SOURCE_ORGANIZE_IMPORTS and SOURCE_FIX_ALL actions, but a request
// scoped to SOURCE_ORGANIZE_IMPORTS must return only the matching action.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The fake server always offers two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for organize-imports actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the request reached the fake server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6815
6816#[gpui::test]
6817async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6818 init_test(cx);
6819
6820 let fs = FakeFs::new(cx.executor());
6821 fs.insert_tree(
6822 path!("/dir"),
6823 json!({
6824 "a.tsx": "a",
6825 }),
6826 )
6827 .await;
6828
6829 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6830
6831 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6832 language_registry.add(tsx_lang());
6833 let language_server_names = [
6834 "TypeScriptServer",
6835 "TailwindServer",
6836 "ESLintServer",
6837 "NoActionsCapabilitiesServer",
6838 ];
6839
6840 let mut language_server_rxs = [
6841 language_registry.register_fake_lsp(
6842 "tsx",
6843 FakeLspAdapter {
6844 name: language_server_names[0],
6845 capabilities: lsp::ServerCapabilities {
6846 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6847 ..lsp::ServerCapabilities::default()
6848 },
6849 ..FakeLspAdapter::default()
6850 },
6851 ),
6852 language_registry.register_fake_lsp(
6853 "tsx",
6854 FakeLspAdapter {
6855 name: language_server_names[1],
6856 capabilities: lsp::ServerCapabilities {
6857 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6858 ..lsp::ServerCapabilities::default()
6859 },
6860 ..FakeLspAdapter::default()
6861 },
6862 ),
6863 language_registry.register_fake_lsp(
6864 "tsx",
6865 FakeLspAdapter {
6866 name: language_server_names[2],
6867 capabilities: lsp::ServerCapabilities {
6868 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6869 ..lsp::ServerCapabilities::default()
6870 },
6871 ..FakeLspAdapter::default()
6872 },
6873 ),
6874 language_registry.register_fake_lsp(
6875 "tsx",
6876 FakeLspAdapter {
6877 name: language_server_names[3],
6878 capabilities: lsp::ServerCapabilities {
6879 code_action_provider: None,
6880 ..lsp::ServerCapabilities::default()
6881 },
6882 ..FakeLspAdapter::default()
6883 },
6884 ),
6885 ];
6886
6887 let (buffer, _handle) = project
6888 .update(cx, |p, cx| {
6889 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6890 })
6891 .await
6892 .unwrap();
6893 cx.executor().run_until_parked();
6894
6895 let mut servers_with_actions_requests = HashMap::default();
6896 for i in 0..language_server_names.len() {
6897 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6898 panic!(
6899 "Failed to get language server #{i} with name {}",
6900 &language_server_names[i]
6901 )
6902 });
6903 let new_server_name = new_server.server.name();
6904
6905 assert!(
6906 !servers_with_actions_requests.contains_key(&new_server_name),
6907 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6908 );
6909 match new_server_name.0.as_ref() {
6910 "TailwindServer" | "TypeScriptServer" => {
6911 servers_with_actions_requests.insert(
6912 new_server_name.clone(),
6913 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6914 move |_, _| {
6915 let name = new_server_name.clone();
6916 async move {
6917 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6918 lsp::CodeAction {
6919 title: format!("{name} code action"),
6920 ..lsp::CodeAction::default()
6921 },
6922 )]))
6923 }
6924 },
6925 ),
6926 );
6927 }
6928 "ESLintServer" => {
6929 servers_with_actions_requests.insert(
6930 new_server_name,
6931 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6932 |_, _| async move { Ok(None) },
6933 ),
6934 );
6935 }
6936 "NoActionsCapabilitiesServer" => {
6937 let _never_handled = new_server
6938 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6939 panic!(
6940 "Should not call for code actions server with no corresponding capabilities"
6941 )
6942 });
6943 }
6944 unexpected => panic!("Unexpected server name: {unexpected}"),
6945 }
6946 }
6947
6948 let code_actions_task = project.update(cx, |project, cx| {
6949 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6950 });
6951
6952 // cx.run_until_parked();
6953 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6954 |mut code_actions_request| async move {
6955 code_actions_request
6956 .next()
6957 .await
6958 .expect("All code actions requests should have been triggered")
6959 },
6960 ))
6961 .await;
6962 assert_eq!(
6963 vec!["TailwindServer code action", "TypeScriptServer code action"],
6964 code_actions_task
6965 .await
6966 .unwrap()
6967 .unwrap()
6968 .into_iter()
6969 .map(|code_action| code_action.lsp_action.title().to_owned())
6970 .sorted()
6971 .collect::<Vec<_>>(),
6972 "Should receive code actions responses from all related servers with hover capabilities"
6973 );
6974}
6975
6976#[gpui::test]
6977async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6978 init_test(cx);
6979
6980 let fs = FakeFs::new(cx.executor());
6981 fs.insert_tree(
6982 "/dir",
6983 json!({
6984 "a.rs": "let a = 1;",
6985 "b.rs": "let b = 2;",
6986 "c.rs": "let c = 2;",
6987 }),
6988 )
6989 .await;
6990
6991 let project = Project::test(
6992 fs,
6993 [
6994 "/dir/a.rs".as_ref(),
6995 "/dir/b.rs".as_ref(),
6996 "/dir/c.rs".as_ref(),
6997 ],
6998 cx,
6999 )
7000 .await;
7001
7002 // check the initial state and get the worktrees
7003 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7004 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7005 assert_eq!(worktrees.len(), 3);
7006
7007 let worktree_a = worktrees[0].read(cx);
7008 let worktree_b = worktrees[1].read(cx);
7009 let worktree_c = worktrees[2].read(cx);
7010
7011 // check they start in the right order
7012 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7013 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7014 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7015
7016 (
7017 worktrees[0].clone(),
7018 worktrees[1].clone(),
7019 worktrees[2].clone(),
7020 )
7021 });
7022
7023 // move first worktree to after the second
7024 // [a, b, c] -> [b, a, c]
7025 project
7026 .update(cx, |project, cx| {
7027 let first = worktree_a.read(cx);
7028 let second = worktree_b.read(cx);
7029 project.move_worktree(first.id(), second.id(), cx)
7030 })
7031 .expect("moving first after second");
7032
7033 // check the state after moving
7034 project.update(cx, |project, cx| {
7035 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7036 assert_eq!(worktrees.len(), 3);
7037
7038 let first = worktrees[0].read(cx);
7039 let second = worktrees[1].read(cx);
7040 let third = worktrees[2].read(cx);
7041
7042 // check they are now in the right order
7043 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7044 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7045 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7046 });
7047
7048 // move the second worktree to before the first
7049 // [b, a, c] -> [a, b, c]
7050 project
7051 .update(cx, |project, cx| {
7052 let second = worktree_a.read(cx);
7053 let first = worktree_b.read(cx);
7054 project.move_worktree(first.id(), second.id(), cx)
7055 })
7056 .expect("moving second before first");
7057
7058 // check the state after moving
7059 project.update(cx, |project, cx| {
7060 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7061 assert_eq!(worktrees.len(), 3);
7062
7063 let first = worktrees[0].read(cx);
7064 let second = worktrees[1].read(cx);
7065 let third = worktrees[2].read(cx);
7066
7067 // check they are now in the right order
7068 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7069 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7070 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7071 });
7072
7073 // move the second worktree to after the third
7074 // [a, b, c] -> [a, c, b]
7075 project
7076 .update(cx, |project, cx| {
7077 let second = worktree_b.read(cx);
7078 let third = worktree_c.read(cx);
7079 project.move_worktree(second.id(), third.id(), cx)
7080 })
7081 .expect("moving second after third");
7082
7083 // check the state after moving
7084 project.update(cx, |project, cx| {
7085 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7086 assert_eq!(worktrees.len(), 3);
7087
7088 let first = worktrees[0].read(cx);
7089 let second = worktrees[1].read(cx);
7090 let third = worktrees[2].read(cx);
7091
7092 // check they are now in the right order
7093 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7094 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7095 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7096 });
7097
7098 // move the third worktree to before the second
7099 // [a, c, b] -> [a, b, c]
7100 project
7101 .update(cx, |project, cx| {
7102 let third = worktree_c.read(cx);
7103 let second = worktree_b.read(cx);
7104 project.move_worktree(third.id(), second.id(), cx)
7105 })
7106 .expect("moving third before second");
7107
7108 // check the state after moving
7109 project.update(cx, |project, cx| {
7110 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7111 assert_eq!(worktrees.len(), 3);
7112
7113 let first = worktrees[0].read(cx);
7114 let second = worktrees[1].read(cx);
7115 let third = worktrees[2].read(cx);
7116
7117 // check they are now in the right order
7118 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7119 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7120 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7121 });
7122
7123 // move the first worktree to after the third
7124 // [a, b, c] -> [b, c, a]
7125 project
7126 .update(cx, |project, cx| {
7127 let first = worktree_a.read(cx);
7128 let third = worktree_c.read(cx);
7129 project.move_worktree(first.id(), third.id(), cx)
7130 })
7131 .expect("moving first after third");
7132
7133 // check the state after moving
7134 project.update(cx, |project, cx| {
7135 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7136 assert_eq!(worktrees.len(), 3);
7137
7138 let first = worktrees[0].read(cx);
7139 let second = worktrees[1].read(cx);
7140 let third = worktrees[2].read(cx);
7141
7142 // check they are now in the right order
7143 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7144 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7145 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7146 });
7147
7148 // move the third worktree to before the first
7149 // [b, c, a] -> [a, b, c]
7150 project
7151 .update(cx, |project, cx| {
7152 let third = worktree_a.read(cx);
7153 let first = worktree_b.read(cx);
7154 project.move_worktree(third.id(), first.id(), cx)
7155 })
7156 .expect("moving third before first");
7157
7158 // check the state after moving
7159 project.update(cx, |project, cx| {
7160 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7161 assert_eq!(worktrees.len(), 3);
7162
7163 let first = worktrees[0].read(cx);
7164 let second = worktrees[1].read(cx);
7165 let third = worktrees[2].read(cx);
7166
7167 // check they are now in the right order
7168 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7169 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7170 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7171 });
7172}
7173
7174#[gpui::test]
7175async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7176 init_test(cx);
7177
7178 let staged_contents = r#"
7179 fn main() {
7180 println!("hello world");
7181 }
7182 "#
7183 .unindent();
7184 let file_contents = r#"
7185 // print goodbye
7186 fn main() {
7187 println!("goodbye world");
7188 }
7189 "#
7190 .unindent();
7191
7192 let fs = FakeFs::new(cx.background_executor.clone());
7193 fs.insert_tree(
7194 "/dir",
7195 json!({
7196 ".git": {},
7197 "src": {
7198 "main.rs": file_contents,
7199 }
7200 }),
7201 )
7202 .await;
7203
7204 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7205
7206 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7207
7208 let buffer = project
7209 .update(cx, |project, cx| {
7210 project.open_local_buffer("/dir/src/main.rs", cx)
7211 })
7212 .await
7213 .unwrap();
7214 let unstaged_diff = project
7215 .update(cx, |project, cx| {
7216 project.open_unstaged_diff(buffer.clone(), cx)
7217 })
7218 .await
7219 .unwrap();
7220
7221 cx.run_until_parked();
7222 unstaged_diff.update(cx, |unstaged_diff, cx| {
7223 let snapshot = buffer.read(cx).snapshot();
7224 assert_hunks(
7225 unstaged_diff.snapshot(cx).hunks(&snapshot),
7226 &snapshot,
7227 &unstaged_diff.base_text_string(cx).unwrap(),
7228 &[
7229 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7230 (
7231 2..3,
7232 " println!(\"hello world\");\n",
7233 " println!(\"goodbye world\");\n",
7234 DiffHunkStatus::modified_none(),
7235 ),
7236 ],
7237 );
7238 });
7239
7240 let staged_contents = r#"
7241 // print goodbye
7242 fn main() {
7243 }
7244 "#
7245 .unindent();
7246
7247 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7248
7249 cx.run_until_parked();
7250 unstaged_diff.update(cx, |unstaged_diff, cx| {
7251 let snapshot = buffer.read(cx).snapshot();
7252 assert_hunks(
7253 unstaged_diff
7254 .snapshot(cx)
7255 .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
7256 &snapshot,
7257 &unstaged_diff.base_text(cx).text(),
7258 &[(
7259 2..3,
7260 "",
7261 " println!(\"goodbye world\");\n",
7262 DiffHunkStatus::added_none(),
7263 )],
7264 );
7265 });
7266}
7267
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of `modification.rs`: committed (HEAD), staged (index),
    // and the on-disk working copy that gets opened into the buffer.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and in the index but not on disk, so it
    // will read as an unstaged working-copy deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD version) should pick up the registered
    // Rust language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Relative to HEAD: the added comment line is not in the index yet
    // (HasSecondaryHunk == unstaged), while the println modification is
    // already staged (no secondary hunk).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is unstaged because the file is still present in the index.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by writing an index without it.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk no longer has a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7451
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index share the committed text; the working copy deletes
    // "zero" and upcases "two" and "four", producing three hunks (one
    // deletion and two modifications).
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so the test can assert on the exact
    // event sequence emitted by staging operations.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. Optimistically it shows as pending again.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7797
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: a deletion of "zero" and two
    // upcased lines, giving three unstaged hunks. This test additionally
    // pauses the fake filesystem's event delivery to exercise staging while
    // earlier index-write events are still in flight.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It shows as pending because the index-write
    // event has not been delivered yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7991
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized test: repeatedly stage/unstage random hunks with random
    // delays and verify the final secondary-status of every hunk matches the
    // last operation issued for it.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    // Issue random stage/unstage operations, tracking the expected pending
    // status locally in `hunks`.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let other tasks interleave to shake out race conditions.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, every pending status should have settled.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8116
8117#[gpui::test]
8118async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8119 init_test(cx);
8120
8121 let committed_contents = r#"
8122 fn main() {
8123 println!("hello from HEAD");
8124 }
8125 "#
8126 .unindent();
8127 let file_contents = r#"
8128 fn main() {
8129 println!("hello from the working copy");
8130 }
8131 "#
8132 .unindent();
8133
8134 let fs = FakeFs::new(cx.background_executor.clone());
8135 fs.insert_tree(
8136 "/dir",
8137 json!({
8138 ".git": {},
8139 "src": {
8140 "main.rs": file_contents,
8141 }
8142 }),
8143 )
8144 .await;
8145
8146 fs.set_head_for_repo(
8147 Path::new("/dir/.git"),
8148 &[("src/main.rs", committed_contents.clone())],
8149 "deadbeef",
8150 );
8151 fs.set_index_for_repo(
8152 Path::new("/dir/.git"),
8153 &[("src/main.rs", committed_contents.clone())],
8154 );
8155
8156 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8157
8158 let buffer = project
8159 .update(cx, |project, cx| {
8160 project.open_local_buffer("/dir/src/main.rs", cx)
8161 })
8162 .await
8163 .unwrap();
8164 let uncommitted_diff = project
8165 .update(cx, |project, cx| {
8166 project.open_uncommitted_diff(buffer.clone(), cx)
8167 })
8168 .await
8169 .unwrap();
8170
8171 cx.run_until_parked();
8172 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8173 let snapshot = buffer.read(cx).snapshot();
8174 assert_hunks(
8175 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8176 &snapshot,
8177 &uncommitted_diff.base_text_string(cx).unwrap(),
8178 &[(
8179 1..2,
8180 " println!(\"hello from HEAD\");\n",
8181 " println!(\"hello from the working copy\");\n",
8182 DiffHunkStatus {
8183 kind: DiffHunkStatusKind::Modified,
8184 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8185 },
8186 )],
8187 );
8188 });
8189}
8190
// TODO: Should we test this on Windows also?
/// Regression test: staging a hunk must not rewrite the index entry's file
/// mode, so a file committed with the executable bit (0755) stays executable
/// in the index after its hunks are staged.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Uses a real on-disk git repository and the git CLI, so blocking is allowed.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    // Commit `foo` with mode 0755, then modify its contents on disk so there
    // is a hunk to stage.
    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // If staging had clobbered the executable bit, `git diff --staged` would
    // report a mode change ("new mode 100644").
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Cross-check against the index directly: `git ls-files -s` prints each
    // entry's mode, which must still be 100755.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8275
/// Maps project paths to their owning repository and repo-relative path, in a
/// tree containing a repository (`dir1`) with a nested sub-repository
/// (`dir1/deps/dep1`). Files outside any repository map to `None`; files in
/// the nested repo resolve to the nested repo, not the outer one. Removing the
/// outer `.git` should drop the mapping for its files.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (worktree-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Delete the outer repository; its files should no longer resolve.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8365
/// A `.git` directly in the user's home directory must not be treated as the
/// repository for a project opened in a subfolder of home, but it is honored
/// when the worktree root is the home directory itself.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: worktree rooted at ~/project — the home repo must be ignored.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: worktree rooted at ~ itself — now the repo applies.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8423
/// End-to-end check of cached git statuses against a real repository: initial
/// scan, a new modification, committing staged changes, and deleting both a
/// tracked and an untracked file.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real on-disk git repository, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file and expect it to appear.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the removal of d.txt.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked (a.txt) and one untracked (b.txt) file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8553
/// Status postprocessing: a nested repository's work directory is excluded
/// from the outer repo's statuses, and a file deleted in the index but present
/// in HEAD and the working copy surfaces as a combined `DA` status.
/// NOTE: currently `#[ignore]`d.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real on-disk git repository, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (work dir ending in "project"), not `sub`.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8617
8618#[track_caller]
8619/// We merge lhs into rhs.
8620fn merge_pending_ops_snapshots(
8621 source: Vec<pending_op::PendingOps>,
8622 mut target: Vec<pending_op::PendingOps>,
8623) -> Vec<pending_op::PendingOps> {
8624 for s_ops in source {
8625 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8626 if ops.repo_path == s_ops.repo_path {
8627 Some(idx)
8628 } else {
8629 None
8630 }
8631 }) {
8632 let t_ops = &mut target[idx];
8633 for s_op in s_ops.ops {
8634 if let Some(op_idx) = t_ops
8635 .ops
8636 .iter()
8637 .zip(0..)
8638 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8639 {
8640 let t_op = &mut t_ops.ops[op_idx];
8641 match (s_op.job_status, t_op.job_status) {
8642 (pending_op::JobStatus::Running, _) => {}
8643 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8644 (s_st, t_st) if s_st == t_st => {}
8645 _ => unreachable!(),
8646 }
8647 } else {
8648 t_ops.ops.push(s_op);
8649 }
8650 }
8651 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8652 } else {
8653 target.push(s_ops);
8654 }
8655 }
8656 target
8657}
8658
/// Pending-op bookkeeping for repeated stage/unstage of one untracked file:
/// each operation is observed as `Running` while its task is in flight and
/// `Finished` once awaited; the merged event stream retains all five ops in
/// order, and the final cached status shows the file staged (index Added).
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next pending op; incremented per stage/unstage call.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the op is Running before the
    // task is awaited and Finished afterwards.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate stage/unstage, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event stream must contain all five ops, in id order, all
    // finished, alternating Staged/Unstaged.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Final cached status: the file is now added to the index, unmodified in
    // the worktree.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8819
/// Issues two stage operations for the same path back to back (detaching the
/// first) and asserts the event stream records the first op as `Skipped` and
/// the second as `Finished` — presumably because the second, identical op
/// supersedes the first (TODO confirm against the pending-op scheduler).
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage op: fire and forget.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage op for the same path; await it (with a timeout so a hang
    // fails the test instead of wedging it).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was skipped; op 2 did the work.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ended up staged exactly once.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8925
/// Pending-op bookkeeping for bulk operations: after staging one file, then
/// `stage_all` followed by `unstage_all`, each of the two untracked files ends
/// with exactly two finished ops (Staged then Unstaged) in the merged event
/// stream, and both files are back to Untracked in the cached status.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into one merged snapshot so the
    // full op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: its individual stage op, then the unstage from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: the stage from stage_all, then the unstage from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9054
/// Opens a project rooted in a subfolder of a repository and checks that the
/// repository above the worktree root is discovered, its statuses are visible
/// for files under the subfolder, and they clear when the repo status clears.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files under the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear the repo's status; both paths should report None.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9134
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is never true, so this test is currently compiled out.
/// Drives a real `git cherry-pick` into a conflict and checks that the
/// repository's `merge_conflicts` set reflects the conflicted path, then
/// clears after the conflict is resolved and CHERRY_PICK_HEAD is removed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real on-disk git repository, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // On a side branch, change a.txt to "A"; on main, change it to "b"; then
    // cherry-pick the side-branch commit onto main to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path must be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9217
/// Rewriting .gitignore at runtime: flips the ignore pattern from `*.txt` to
/// `*.xml` and stages the newly-unignored file, then checks that each entry's
/// ignored flag and index status are updated accordingly.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored and b.txt shows as Added in the index.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9285
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Verifies that renaming a repository's working directory on disk updates the
// repository's `work_directory_abs_path` while per-file statuses are preserved.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem (TempTree + RealFs), so parking must be allowed.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // "a" is committed, then modified on disk; "b" is never added (untracked).
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir points at project1, statuses as set up above.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the entire work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; file statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9367
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory that some program already has open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Exercises the full lifecycle of git file statuses against a real repository:
// untracked files, worktree modifications, commits, reset/unstage/stash,
// ignore-rule changes, deletions, and renames of parent directories.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem test (TempTree + RealFs), so parking must be allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the test.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed, unmodified files report no status entry at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files, extend the ignore rules, and commit the new .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // `renamed_dir_name` is reassigned after the rename below, hence `mut`.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The new file inside the nested directory shows up as untracked.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Rename the top-level parent directory of the untracked file.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The untracked status follows the file to its new path.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
9592
// Verifies that FS churn inside an ignored directory (like `target/`) produces
// worktree-entry updates only for already-loaded entries and never triggers
// repository status updates.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem test (TempTree + RealFs), so parking must be allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Capture repository update events and worktree entry updates so we can
    // assert exactly which notifications each FS change produces.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel file is test infrastructure noise, not part of the scenario.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Force-load a file inside the ignored dir so its ancestors become tracked entries.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build churn: create a deps dir, drop a temp file in it, then remove it.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
9754
9755// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
9756// to different timings/ordering of events.
// Verifies that a spurious metadata-only FS event on an ignored directory
// (as emitted by flycheck-style tooling) produces no repository or project
// events at all.
#[ignore]
#[gpui::test]
async fn test_odd_events_for_ignored_dirs(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "**/target/",
            "src": {
                "main.rs": "fn main() {}",
            },
            "target": {
                "debug": {
                    "foo.txt": "foo",
                    "deps": {}
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "**/target/".into()),
            ("src/main.rs", "fn main() {}".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    // Capture repository update events and worktree entry updates so we can
    // assert exactly which notifications each FS change produces.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repository_updates = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repository_updates.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel file is test infrastructure noise, not part of the scenario.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Force-load a file inside the ignored dir so its ancestors become tracked entries.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("target/debug/foo.txt"), cx)
    })
    .await
    .unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path(".gitignore"), false),
                (rel_path("src"), false),
                (rel_path("src/main.rs"), false),
                (rel_path("target"), true),
                (rel_path("target/debug"), true),
                (rel_path("target/debug/deps"), true),
                (rel_path("target/debug/foo.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::MergeHeadsChanged,
            RepositoryEvent::BranchChanged,
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::StatusesChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("target".to_string(), PathChange::Loaded),
            ("target/debug".to_string(), PathChange::Loaded),
            ("target/debug/deps".to_string(), PathChange::Loaded),
            ("target/debug/foo.txt".to_string(), PathChange::Loaded),
        ],
        "All non-ignored entries and all opened firs should be getting a project event",
    );

    // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
    // This may happen multiple times during a single flycheck, but once is enough for testing.
    fs.emit_fs_event("/root/target/debug/deps", None);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        Vec::new(),
        "No further project events should happen, as only ignored dirs received FS events",
    );
}
9889
// Verifies that adding an invisible (non-visible) worktree does not register
// additional repositories: only the visible worktree's repository is reported.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the nested repo as the visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree for a file that lives in the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged: the outer repo stays unregistered.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9951
// Verifies ignore handling across rescans: ancestor .gitignore files apply to
// descendants, newly created files get the right ignored/status state, and the
// `.git` dir itself is treated as ignored.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so the test controls visibility purely via gitignore.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    // The project root is /root/tree, so /root/.gitignore is an *ancestor* ignore file.
    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be loaded so they can be asserted on.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category: staged-tracked, ancestor-ignored, and ignored.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10092
// Verifies discovery of linked git worktrees (`.git` file with a `gitdir:`
// pointer into `.git/worktrees/...`) and submodules (`gitdir:` pointer into
// `.git/modules/...`), and that git events in each are routed to the right
// repository.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered: main, linked worktree, submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait until the repository has processed all pending work before asserting status.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10248
// Verifies that two project worktrees rooted inside the same git repository
// are deduplicated into a single repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories that both live inside the same repo.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository should be registered, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10295
// Verifies that saving a buffer under a new path (triggering the
// `BufferChangedFilePath` event) re-bases its unstaged and uncommitted diffs
// against the new file's staged and committed contents.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each (file, git stage) pair so assertions can tell
    // exactly which base text a diff was computed against.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so the later save-as produces a real content change.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff (opened after the rename) must use file_2's HEAD content.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10409
10410async fn search(
10411 project: &Entity<Project>,
10412 query: SearchQuery,
10413 cx: &mut gpui::TestAppContext,
10414) -> Result<HashMap<String, Vec<Range<usize>>>> {
10415 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10416 let mut results = HashMap::default();
10417 while let Ok(search_result) = search_rx.recv().await {
10418 match search_result {
10419 SearchResult::Buffer { buffer, ranges } => {
10420 results.entry(buffer).or_insert(ranges);
10421 }
10422 SearchResult::LimitReached => {}
10423 }
10424 }
10425 Ok(results
10426 .into_iter()
10427 .map(|(buffer, ranges)| {
10428 buffer.update(cx, |buffer, cx| {
10429 let path = buffer
10430 .file()
10431 .unwrap()
10432 .full_path(cx)
10433 .to_string_lossy()
10434 .to_string();
10435 let ranges = ranges
10436 .into_iter()
10437 .map(|range| range.to_offset(buffer))
10438 .collect::<Vec<_>>();
10439 (path, ranges)
10440 })
10441 })
10442 .collect())
10443}
10444
10445pub fn init_test(cx: &mut gpui::TestAppContext) {
10446 zlog::init_test();
10447
10448 cx.update(|cx| {
10449 let settings_store = SettingsStore::test(cx);
10450 cx.set_global(settings_store);
10451 release_channel::init(semver::Version::new(0, 0, 0), cx);
10452 });
10453}
10454
10455fn json_lang() -> Arc<Language> {
10456 Arc::new(Language::new(
10457 LanguageConfig {
10458 name: "JSON".into(),
10459 matcher: LanguageMatcher {
10460 path_suffixes: vec!["json".to_string()],
10461 ..Default::default()
10462 },
10463 ..Default::default()
10464 },
10465 None,
10466 ))
10467}
10468
10469fn js_lang() -> Arc<Language> {
10470 Arc::new(Language::new(
10471 LanguageConfig {
10472 name: "JavaScript".into(),
10473 matcher: LanguageMatcher {
10474 path_suffixes: vec!["js".to_string()],
10475 ..Default::default()
10476 },
10477 ..Default::default()
10478 },
10479 None,
10480 ))
10481}
10482
10483fn rust_lang() -> Arc<Language> {
10484 Arc::new(Language::new(
10485 LanguageConfig {
10486 name: "Rust".into(),
10487 matcher: LanguageMatcher {
10488 path_suffixes: vec!["rs".to_string()],
10489 ..Default::default()
10490 },
10491 ..Default::default()
10492 },
10493 Some(tree_sitter_rust::LANGUAGE.into()),
10494 ))
10495}
10496
10497fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
10498 struct PythonMootToolchainLister(Arc<FakeFs>);
10499 #[async_trait]
10500 impl ToolchainLister for PythonMootToolchainLister {
10501 async fn list(
10502 &self,
10503 worktree_root: PathBuf,
10504 subroot_relative_path: Arc<RelPath>,
10505 _: Option<HashMap<String, String>>,
10506 _: &dyn Fs,
10507 ) -> ToolchainList {
10508 // This lister will always return a path .venv directories within ancestors
10509 let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
10510 let mut toolchains = vec![];
10511 for ancestor in ancestors {
10512 let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
10513 if self.0.is_dir(&venv_path).await {
10514 toolchains.push(Toolchain {
10515 name: SharedString::new("Python Venv"),
10516 path: venv_path.to_string_lossy().into_owned().into(),
10517 language_name: LanguageName(SharedString::new_static("Python")),
10518 as_json: serde_json::Value::Null,
10519 })
10520 }
10521 }
10522 ToolchainList {
10523 toolchains,
10524 ..Default::default()
10525 }
10526 }
10527 async fn resolve(
10528 &self,
10529 _: PathBuf,
10530 _: Option<HashMap<String, String>>,
10531 _: &dyn Fs,
10532 ) -> anyhow::Result<Toolchain> {
10533 Err(anyhow::anyhow!("Not implemented"))
10534 }
10535 fn meta(&self) -> ToolchainMetadata {
10536 ToolchainMetadata {
10537 term: SharedString::new_static("Virtual Environment"),
10538 new_toolchain_placeholder: SharedString::new_static(
10539 "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
10540 ),
10541 manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
10542 }
10543 }
10544 fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
10545 vec![]
10546 }
10547 }
10548 Arc::new(
10549 Language::new(
10550 LanguageConfig {
10551 name: "Python".into(),
10552 matcher: LanguageMatcher {
10553 path_suffixes: vec!["py".to_string()],
10554 ..Default::default()
10555 },
10556 ..Default::default()
10557 },
10558 None, // We're not testing Python parsing with this language.
10559 )
10560 .with_manifest(Some(ManifestName::from(SharedString::new_static(
10561 "pyproject.toml",
10562 ))))
10563 .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
10564 )
10565}
10566
10567fn typescript_lang() -> Arc<Language> {
10568 Arc::new(Language::new(
10569 LanguageConfig {
10570 name: "TypeScript".into(),
10571 matcher: LanguageMatcher {
10572 path_suffixes: vec!["ts".to_string()],
10573 ..Default::default()
10574 },
10575 ..Default::default()
10576 },
10577 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10578 ))
10579}
10580
10581fn tsx_lang() -> Arc<Language> {
10582 Arc::new(Language::new(
10583 LanguageConfig {
10584 name: "tsx".into(),
10585 matcher: LanguageMatcher {
10586 path_suffixes: vec!["tsx".to_string()],
10587 ..Default::default()
10588 },
10589 ..Default::default()
10590 },
10591 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10592 ))
10593}
10594
10595fn get_all_tasks(
10596 project: &Entity<Project>,
10597 task_contexts: Arc<TaskContexts>,
10598 cx: &mut App,
10599) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10600 let new_tasks = project.update(cx, |project, cx| {
10601 project.task_store.update(cx, |task_store, cx| {
10602 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10603 this.used_and_current_resolved_tasks(task_contexts, cx)
10604 })
10605 })
10606 });
10607
10608 cx.background_spawn(async move {
10609 let (mut old, new) = new_tasks.await;
10610 old.extend(new);
10611 old
10612 })
10613}
10614
10615#[track_caller]
10616fn assert_entry_git_state(
10617 tree: &Worktree,
10618 repository: &Repository,
10619 path: &str,
10620 index_status: Option<StatusCode>,
10621 is_ignored: bool,
10622) {
10623 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10624 let entry = tree
10625 .entry_for_path(&rel_path(path))
10626 .unwrap_or_else(|| panic!("entry {path} not found"));
10627 let status = repository
10628 .status_for_path(&repo_path(path))
10629 .map(|entry| entry.status);
10630 let expected = index_status.map(|index_status| {
10631 TrackedStatus {
10632 index_status,
10633 worktree_status: StatusCode::Unmodified,
10634 }
10635 .into()
10636 });
10637 assert_eq!(
10638 status, expected,
10639 "expected {path} to have git status: {expected:?}"
10640 );
10641 assert_eq!(
10642 entry.is_ignored, is_ignored,
10643 "expected {path} to have is_ignored: {is_ignored}"
10644 );
10645}
10646
10647#[track_caller]
10648fn git_init(path: &Path) -> git2::Repository {
10649 let mut init_opts = RepositoryInitOptions::new();
10650 init_opts.initial_head("main");
10651 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10652}
10653
10654#[track_caller]
10655fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10656 let path = path.as_ref();
10657 let mut index = repo.index().expect("Failed to get index");
10658 index.add_path(path).expect("Failed to add file");
10659 index.write().expect("Failed to write index");
10660}
10661
10662#[track_caller]
10663fn git_remove_index(path: &Path, repo: &git2::Repository) {
10664 let mut index = repo.index().expect("Failed to get index");
10665 index.remove_path(path).expect("Failed to add file");
10666 index.write().expect("Failed to write index");
10667}
10668
10669#[track_caller]
10670fn git_commit(msg: &'static str, repo: &git2::Repository) {
10671 use git2::Signature;
10672
10673 let signature = Signature::now("test", "test@zed.dev").unwrap();
10674 let oid = repo.index().unwrap().write_tree().unwrap();
10675 let tree = repo.find_tree(oid).unwrap();
10676 if let Ok(head) = repo.head() {
10677 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10678
10679 let parent_commit = parent_obj.as_commit().unwrap();
10680
10681 repo.commit(
10682 Some("HEAD"),
10683 &signature,
10684 &signature,
10685 msg,
10686 &tree,
10687 &[parent_commit],
10688 )
10689 .expect("Failed to commit with parent");
10690 } else {
10691 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10692 .expect("Failed to commit");
10693 }
10694}
10695
// Cherry-picks `commit` onto the current working tree and index.
// NOTE: `#[cfg(any())]` is never true, so this helper is currently compiled
// out; it is kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
10701
10702#[track_caller]
10703fn git_stash(repo: &mut git2::Repository) {
10704 use git2::Signature;
10705
10706 let signature = Signature::now("test", "test@zed.dev").unwrap();
10707 repo.stash_save(&signature, "N/A", None)
10708 .expect("Failed to stash");
10709}
10710
10711#[track_caller]
10712fn git_reset(offset: usize, repo: &git2::Repository) {
10713 let head = repo.head().expect("Couldn't get repo head");
10714 let object = head.peel(git2::ObjectType::Commit).unwrap();
10715 let commit = object.as_commit().unwrap();
10716 let new_head = commit
10717 .parents()
10718 .inspect(|parnet| {
10719 parnet.message();
10720 })
10721 .nth(offset)
10722 .expect("Not enough history");
10723 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10724 .expect("Could not reset");
10725}
10726
// Creates branch `name` pointing at the current HEAD commit.
// NOTE: `#[cfg(any())]` is never true, so this helper is currently compiled
// out; it is kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Panic message previously said "Failed to commit" (copy-paste error).
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10737
// Points HEAD at the ref `name` and checks out its tree.
// NOTE: `#[cfg(any())]` is never true, so this helper is currently compiled
// out; it is kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10744
// Snapshots the repository's current status as a path -> `git2::Status` map.
// NOTE: `#[cfg(any())]` is never true, so this helper is currently compiled
// out; it is kept for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
10754
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling directories, each opened as its own worktree below.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute path and id so assertions below can
    // pair an absolute input path with the worktree it should resolve to.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves to a multi-component relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Paths inside a worktree resolve even if no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
10838
// Verifies that removing worktrees updates the set of tracked git
// repositories and that the active repository falls back to a remaining one
// (and to `None` once all repo-bearing worktrees are gone).
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two git repos: `/root/a` and `/root/b`. `/root/b/script` is opened as
    // its own worktree but lives *inside* repo `b`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index worktree ids by absolute path so we can remove them by name below.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two repositories despite three worktrees: `script` belongs to `b`.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested `script` worktree must not drop repo `b`, since the
    // `/root/b` worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing `/root/a` should switch the active repository over to `b`.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last repo-bearing worktree leaves no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}