1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
13};
14use fs::FakeFs;
15use futures::{StreamExt, future};
16use git::{
17 GitHostingProviderRegistry,
18 repository::{RepoPath, repo_path},
19 status::{StatusCode, TrackedStatus},
20};
21use git2::RepositoryInitOptions;
22use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
23use itertools::Itertools;
24use language::{
25 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
26 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
27 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
28 ToolchainLister,
29 language_settings::{LanguageSettingsContent, language_settings},
30 rust_lang, tree_sitter_typescript,
31};
32use lsp::{
33 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
34 Uri, WillRenameFiles, notification::DidRenameFiles,
35};
36use parking_lot::Mutex;
37use paths::{config_dir, global_gitignore_path, tasks_file};
38use postage::stream::Stream as _;
39use pretty_assertions::{assert_eq, assert_matches};
40use rand::{Rng as _, rngs::StdRng};
41use serde_json::json;
42#[cfg(not(windows))]
43use std::os;
44use std::{
45 env, mem,
46 num::NonZeroU32,
47 ops::Range,
48 str::FromStr,
49 sync::{Arc, OnceLock},
50 task::Poll,
51};
52use sum_tree::SumTree;
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
97// NOTE:
98// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
99// we assume that they are not supported out of the box.
100#[cfg(not(windows))]
101#[gpui::test]
102async fn test_symlinks(cx: &mut gpui::TestAppContext) {
103 init_test(cx);
104 cx.executor().allow_parking();
105
106 let dir = TempTree::new(json!({
107 "root": {
108 "apple": "",
109 "banana": {
110 "carrot": {
111 "date": "",
112 "endive": "",
113 }
114 },
115 "fennel": {
116 "grape": "",
117 }
118 }
119 }));
120
121 let root_link_path = dir.path().join("root_link");
122 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
123 os::unix::fs::symlink(
124 dir.path().join("root/fennel"),
125 dir.path().join("root/finnochio"),
126 )
127 .unwrap();
128
129 let project = Project::test(
130 Arc::new(RealFs::new(None, cx.executor())),
131 [root_link_path.as_ref()],
132 cx,
133 )
134 .await;
135
136 project.update(cx, |project, cx| {
137 let tree = project.worktrees(cx).next().unwrap().read(cx);
138 assert_eq!(tree.file_count(), 5);
139 assert_eq!(
140 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
141 tree.entry_for_path(rel_path("finnochio/grape"))
142 .unwrap()
143 .inode
144 );
145 });
146}
147
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root .editorconfig (declared `root = true`), Zed project
    // settings under .zed/, and a nested .editorconfig in `b/` that partially
    // overrides the root one.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs-backed project.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings.json for matching files.
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // The .editorconfig in b/ overrides the .editorconfig in the root.
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used.
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", fall back to .zed/settings.json.
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig globs
        // "*.rs" and "*.js"; it keeps the .zed/settings.json tab size.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
246
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    // Project settings declare a custom git hosting provider named "foo".
    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // Loading the project settings registers the provider in the global
    // registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clearing the project settings on disk should unregister it again.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
311
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two `.zed` directories contribute settings and tasks: one at the
    // worktree root and one nested under `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree only.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks coming from the root-level `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: files under `b/` pick up the nested
            // `.zed/settings.json`; other files use the root one.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most-recently scheduled and add a global task
    // through the user-level tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled task now sorts first; the new global task is
    // listed last with its env applied.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
512
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // A single worktree task whose command expands $ZED_WORKTREE_ROOT.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Without an active worktree context there is no ZED_WORKTREE_ROOT
    // variable available, so the task cannot be resolved.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context supplying WorktreeRoot, the task resolves and
    // the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
604
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: walks up from the queried path looking for a
    // `pyproject.toml`, which marks the root of a Python subproject.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Check at most `depth` ancestors, nearest first.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects in one worktree, each rooted by its own
    // pyproject.toml and carrying its own `.venv` directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer in project-a starts the first "ty" server instance.
    language_registry.add(python_lang(fs.clone()));
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should be served by the same instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's pyproject.toml.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After activating a toolchain for project-b, that subproject should be
    // served by a separate server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
806
807#[gpui::test]
808async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
809 init_test(cx);
810
811 let fs = FakeFs::new(cx.executor());
812 fs.insert_tree(
813 path!("/dir"),
814 json!({
815 "test.rs": "const A: i32 = 1;",
816 "test2.rs": "",
817 "Cargo.toml": "a = 1",
818 "package.json": "{\"a\": 1}",
819 }),
820 )
821 .await;
822
823 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
824 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
825
826 let mut fake_rust_servers = language_registry.register_fake_lsp(
827 "Rust",
828 FakeLspAdapter {
829 name: "the-rust-language-server",
830 capabilities: lsp::ServerCapabilities {
831 completion_provider: Some(lsp::CompletionOptions {
832 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
833 ..Default::default()
834 }),
835 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
836 lsp::TextDocumentSyncOptions {
837 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
838 ..Default::default()
839 },
840 )),
841 ..Default::default()
842 },
843 ..Default::default()
844 },
845 );
846 let mut fake_json_servers = language_registry.register_fake_lsp(
847 "JSON",
848 FakeLspAdapter {
849 name: "the-json-language-server",
850 capabilities: lsp::ServerCapabilities {
851 completion_provider: Some(lsp::CompletionOptions {
852 trigger_characters: Some(vec![":".to_string()]),
853 ..Default::default()
854 }),
855 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
856 lsp::TextDocumentSyncOptions {
857 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
858 ..Default::default()
859 },
860 )),
861 ..Default::default()
862 },
863 ..Default::default()
864 },
865 );
866
867 // Open a buffer without an associated language server.
868 let (toml_buffer, _handle) = project
869 .update(cx, |project, cx| {
870 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
871 })
872 .await
873 .unwrap();
874
875 // Open a buffer with an associated language server before the language for it has been loaded.
876 let (rust_buffer, _handle2) = project
877 .update(cx, |project, cx| {
878 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
879 })
880 .await
881 .unwrap();
882 rust_buffer.update(cx, |buffer, _| {
883 assert_eq!(buffer.language().map(|l| l.name()), None);
884 });
885
886 // Now we add the languages to the project, and ensure they get assigned to all
887 // the relevant open buffers.
888 language_registry.add(json_lang());
889 language_registry.add(rust_lang());
890 cx.executor().run_until_parked();
891 rust_buffer.update(cx, |buffer, _| {
892 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
893 });
894
895 // A server is started up, and it is notified about Rust files.
896 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
897 assert_eq!(
898 fake_rust_server
899 .receive_notification::<lsp::notification::DidOpenTextDocument>()
900 .await
901 .text_document,
902 lsp::TextDocumentItem {
903 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
904 version: 0,
905 text: "const A: i32 = 1;".to_string(),
906 language_id: "rust".to_string(),
907 }
908 );
909
910 // The buffer is configured based on the language server's capabilities.
911 rust_buffer.update(cx, |buffer, _| {
912 assert_eq!(
913 buffer
914 .completion_triggers()
915 .iter()
916 .cloned()
917 .collect::<Vec<_>>(),
918 &[".".to_string(), "::".to_string()]
919 );
920 });
921 toml_buffer.update(cx, |buffer, _| {
922 assert!(buffer.completion_triggers().is_empty());
923 });
924
925 // Edit a buffer. The changes are reported to the language server.
926 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
927 assert_eq!(
928 fake_rust_server
929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
930 .await
931 .text_document,
932 lsp::VersionedTextDocumentIdentifier::new(
933 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
934 1
935 )
936 );
937
938 // Open a third buffer with a different associated language server.
939 let (json_buffer, _json_handle) = project
940 .update(cx, |project, cx| {
941 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
942 })
943 .await
944 .unwrap();
945
946 // A json language server is started up and is only notified about the json buffer.
947 let mut fake_json_server = fake_json_servers.next().await.unwrap();
948 assert_eq!(
949 fake_json_server
950 .receive_notification::<lsp::notification::DidOpenTextDocument>()
951 .await
952 .text_document,
953 lsp::TextDocumentItem {
954 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
955 version: 0,
956 text: "{\"a\": 1}".to_string(),
957 language_id: "json".to_string(),
958 }
959 );
960
961 // This buffer is configured based on the second language server's
962 // capabilities.
963 json_buffer.update(cx, |buffer, _| {
964 assert_eq!(
965 buffer
966 .completion_triggers()
967 .iter()
968 .cloned()
969 .collect::<Vec<_>>(),
970 &[":".to_string()]
971 );
972 });
973
974 // When opening another buffer whose language server is already running,
975 // it is also configured based on the existing language server's capabilities.
976 let (rust_buffer2, _handle4) = project
977 .update(cx, |project, cx| {
978 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
979 })
980 .await
981 .unwrap();
982 rust_buffer2.update(cx, |buffer, _| {
983 assert_eq!(
984 buffer
985 .completion_triggers()
986 .iter()
987 .cloned()
988 .collect::<Vec<_>>(),
989 &[".".to_string(), "::".to_string()]
990 );
991 });
992
993 // Changes are reported only to servers matching the buffer's language.
994 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
995 rust_buffer2.update(cx, |buffer, cx| {
996 buffer.edit([(0..0, "let x = 1;")], None, cx)
997 });
998 assert_eq!(
999 fake_rust_server
1000 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1001 .await
1002 .text_document,
1003 lsp::VersionedTextDocumentIdentifier::new(
1004 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1005 1
1006 )
1007 );
1008
1009 // Save notifications are reported to all servers.
1010 project
1011 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1012 .await
1013 .unwrap();
1014 assert_eq!(
1015 fake_rust_server
1016 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1017 .await
1018 .text_document,
1019 lsp::TextDocumentIdentifier::new(
1020 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1021 )
1022 );
1023 assert_eq!(
1024 fake_json_server
1025 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1026 .await
1027 .text_document,
1028 lsp::TextDocumentIdentifier::new(
1029 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1030 )
1031 );
1032
1033 // Renames are reported only to servers matching the buffer's language.
1034 fs.rename(
1035 Path::new(path!("/dir/test2.rs")),
1036 Path::new(path!("/dir/test3.rs")),
1037 Default::default(),
1038 )
1039 .await
1040 .unwrap();
1041 assert_eq!(
1042 fake_rust_server
1043 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1044 .await
1045 .text_document,
1046 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1047 );
1048 assert_eq!(
1049 fake_rust_server
1050 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1051 .await
1052 .text_document,
1053 lsp::TextDocumentItem {
1054 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1055 version: 0,
1056 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1057 language_id: "rust".to_string(),
1058 },
1059 );
1060
1061 rust_buffer2.update(cx, |buffer, cx| {
1062 buffer.update_diagnostics(
1063 LanguageServerId(0),
1064 DiagnosticSet::from_sorted_entries(
1065 vec![DiagnosticEntry {
1066 diagnostic: Default::default(),
1067 range: Anchor::MIN..Anchor::MAX,
1068 }],
1069 &buffer.snapshot(),
1070 ),
1071 cx,
1072 );
1073 assert_eq!(
1074 buffer
1075 .snapshot()
1076 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1077 .count(),
1078 1
1079 );
1080 });
1081
1082 // When the rename changes the extension of the file, the buffer gets closed on the old
1083 // language server and gets opened on the new one.
1084 fs.rename(
1085 Path::new(path!("/dir/test3.rs")),
1086 Path::new(path!("/dir/test3.json")),
1087 Default::default(),
1088 )
1089 .await
1090 .unwrap();
1091 assert_eq!(
1092 fake_rust_server
1093 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1094 .await
1095 .text_document,
1096 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1097 );
1098 assert_eq!(
1099 fake_json_server
1100 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1101 .await
1102 .text_document,
1103 lsp::TextDocumentItem {
1104 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1105 version: 0,
1106 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1107 language_id: "json".to_string(),
1108 },
1109 );
1110
1111 // We clear the diagnostics, since the language has changed.
1112 rust_buffer2.update(cx, |buffer, _| {
1113 assert_eq!(
1114 buffer
1115 .snapshot()
1116 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1117 .count(),
1118 0
1119 );
1120 });
1121
1122 // The renamed file's version resets after changing language server.
1123 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1124 assert_eq!(
1125 fake_json_server
1126 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1127 .await
1128 .text_document,
1129 lsp::VersionedTextDocumentIdentifier::new(
1130 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1131 1
1132 )
1133 );
1134
1135 // Restart language servers
1136 project.update(cx, |project, cx| {
1137 project.restart_language_servers_for_buffers(
1138 vec![rust_buffer.clone(), json_buffer.clone()],
1139 HashSet::default(),
1140 cx,
1141 );
1142 });
1143
1144 let mut rust_shutdown_requests = fake_rust_server
1145 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1146 let mut json_shutdown_requests = fake_json_server
1147 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1148 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1149
1150 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1151 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1152
1153 // Ensure rust document is reopened in new rust language server
1154 assert_eq!(
1155 fake_rust_server
1156 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1157 .await
1158 .text_document,
1159 lsp::TextDocumentItem {
1160 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1161 version: 0,
1162 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1163 language_id: "rust".to_string(),
1164 }
1165 );
1166
1167 // Ensure json documents are reopened in new json language server
1168 assert_set_eq!(
1169 [
1170 fake_json_server
1171 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1172 .await
1173 .text_document,
1174 fake_json_server
1175 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1176 .await
1177 .text_document,
1178 ],
1179 [
1180 lsp::TextDocumentItem {
1181 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1182 version: 0,
1183 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1184 language_id: "json".to_string(),
1185 },
1186 lsp::TextDocumentItem {
1187 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1188 version: 0,
1189 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1190 language_id: "json".to_string(),
1191 }
1192 ]
1193 );
1194
1195 // Close notifications are reported only to servers matching the buffer's language.
1196 cx.update(|_| drop(_json_handle));
1197 let close_message = lsp::DidCloseTextDocumentParams {
1198 text_document: lsp::TextDocumentIdentifier::new(
1199 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1200 ),
1201 };
1202 assert_eq!(
1203 fake_json_server
1204 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1205 .await,
1206 close_message,
1207 );
1208}
1209
#[gpui::test]
async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
    // Verifies how user-configured language-server binary paths are resolved:
    // a path that resolves inside the worktree is treated as worktree-relative,
    // while one that does not is passed through for $PATH lookup.
    init_test(cx);

    // Project settings configuring two language servers for Rust, each with an
    // explicit binary path exercising one of the two resolution strategies.
    let settings_json_contents = json!({
        "languages": {
            "Rust": {
                "language_servers": ["my_fake_lsp", "lsp_on_path"]
            }
        },
        "lsp": {
            "my_fake_lsp": {
                "binary": {
                    // file exists, so this is treated as a relative path
                    // NOTE(review): the tree below contains "my_fake_lsp.exe",
                    // but this setting references "my_fake_lsp_binary.exe" —
                    // the assertion still expects worktree-relative resolution;
                    // confirm whether existence of the exact file is checked.
                    "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
                }
            },
            "lsp_on_path": {
                "binary": {
                    // file doesn't exist, so it will fall back on PATH env var
                    "path": path!("lsp_on_path.exe").to_string(),
                }
            }
        },
    });

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": settings_json_contents.to_string(),
            },
            ".relative_path": {
                "to": {
                    "my_fake_lsp.exe": "",
                },
            },
            "src": {
                "main.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());

    // Register fakes for both configured servers; each stream yields the
    // server (including the resolved binary) once it is started.
    let mut my_fake_lsp = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "my_fake_lsp",
            ..Default::default()
        },
    );
    let mut lsp_on_path = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "lsp_on_path",
            ..Default::default()
        },
    );

    cx.run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
        })
        .await
        .unwrap();

    // The relative path is resolved against the worktree root.
    let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
    assert_eq!(
        lsp_path.to_string_lossy(),
        path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
    );

    // The non-existent path is left as-is for $PATH-based lookup.
    let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
    assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
}
1293
1294#[gpui::test]
1295async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1296 init_test(cx);
1297
1298 let settings_json_contents = json!({
1299 "languages": {
1300 "Rust": {
1301 "language_servers": ["tilde_lsp"]
1302 }
1303 },
1304 "lsp": {
1305 "tilde_lsp": {
1306 "binary": {
1307 "path": "~/.local/bin/rust-analyzer",
1308 }
1309 }
1310 },
1311 });
1312
1313 let fs = FakeFs::new(cx.executor());
1314 fs.insert_tree(
1315 path!("/root"),
1316 json!({
1317 ".zed": {
1318 "settings.json": settings_json_contents.to_string(),
1319 },
1320 "src": {
1321 "main.rs": "fn main() {}",
1322 }
1323 }),
1324 )
1325 .await;
1326
1327 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1328 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1329 language_registry.add(rust_lang());
1330
1331 let mut tilde_lsp = language_registry.register_fake_lsp(
1332 "Rust",
1333 FakeLspAdapter {
1334 name: "tilde_lsp",
1335 ..Default::default()
1336 },
1337 );
1338 cx.run_until_parked();
1339
1340 project
1341 .update(cx, |project, cx| {
1342 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1343 })
1344 .await
1345 .unwrap();
1346
1347 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1348 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1349 assert_eq!(
1350 lsp_path, expected_path,
1351 "Tilde path should expand to home directory"
1352 );
1353}
1354
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies `workspace/didChangeWatchedFiles` support: glob registration,
    // lazy loading of ignored directories that a server asks to watch, and
    // delivery of matching FS events (including paths outside the worktree).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    // Trees outside the worktree, reachable only via LSP watchers / go-to-definition.
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                // `target` is gitignored and not expanded yet.
                ("target", true),
            ]
        );
    });

    // Baseline for counting the directory reads triggered by watcher registration.
    let prev_read_dir_count = fs.read_dir_call_count();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            // Exact path inside the worktree.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            // Extension glob inside the worktree.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            // Recursive glob inside a gitignored directory —
                            // forces loading of `target/y`.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            // Recursive glob entirely outside the worktree.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the/stdlib/src/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            // Relative glob matched against any worktree root.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("**/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort for deterministic comparison below.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // No FS mutations yet, so no events should have been delivered.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // Registering the watchers loads the newly watched (previously ignored /
    // external) directories; hedged: the exact count of 4 reads reflects the
    // current loading strategy — TODO confirm if loading behavior changes.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Ignore watches unrelated to this test (settings dir, global gitignore).
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| {
        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
    });
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
                // Only `target/y` (the watched subtree) is expanded; its
                // siblings stay as unexpanded directory entries.
                ("target/x", true),
                ("target/y", true),
                ("target/y/out", true),
                ("target/y/out/y.rs", true),
                ("target/z", true),
            ]
        );
    });

    // Perform several file system mutations, some of which match the watched
    // patterns and some of which do not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    // NOTE(review): "/the-stdlib" differs from the "/the/stdlib" tree inserted
    // above — confirm this path is intentional (it matches no watcher either way).
    fs.save(
        path!("/the-stdlib/LICENSE").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
1658
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics published for buffers living in two separate
    // single-file worktrees are routed to the correct buffer, preserving each
    // diagnostic's range and severity.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with two single-file worktrees instead of one
    // directory worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file, with distinct severities so the
    // assertions below can tell which buffer received which diagnostic.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // `a.rs` gets the ERROR on the identifier `a`.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    // `b.rs` gets the WARNING on the identifier `b`.
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1764
1765#[gpui::test]
1766async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1767 init_test(cx);
1768
1769 let fs = FakeFs::new(cx.executor());
1770 fs.insert_tree(
1771 path!("/root"),
1772 json!({
1773 "dir": {
1774 ".git": {
1775 "HEAD": "ref: refs/heads/main",
1776 },
1777 ".gitignore": "b.rs",
1778 "a.rs": "let a = 1;",
1779 "b.rs": "let b = 2;",
1780 },
1781 "other.rs": "let b = c;"
1782 }),
1783 )
1784 .await;
1785
1786 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1787 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1788 let (worktree, _) = project
1789 .update(cx, |project, cx| {
1790 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1791 })
1792 .await
1793 .unwrap();
1794 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1795
1796 let (worktree, _) = project
1797 .update(cx, |project, cx| {
1798 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1799 })
1800 .await
1801 .unwrap();
1802 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1803
1804 let server_id = LanguageServerId(0);
1805 lsp_store.update(cx, |lsp_store, cx| {
1806 lsp_store
1807 .update_diagnostics(
1808 server_id,
1809 lsp::PublishDiagnosticsParams {
1810 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1811 version: None,
1812 diagnostics: vec![lsp::Diagnostic {
1813 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1814 severity: Some(lsp::DiagnosticSeverity::ERROR),
1815 message: "unused variable 'b'".to_string(),
1816 ..Default::default()
1817 }],
1818 },
1819 None,
1820 DiagnosticSourceKind::Pushed,
1821 &[],
1822 cx,
1823 )
1824 .unwrap();
1825 lsp_store
1826 .update_diagnostics(
1827 server_id,
1828 lsp::PublishDiagnosticsParams {
1829 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1830 version: None,
1831 diagnostics: vec![lsp::Diagnostic {
1832 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1833 severity: Some(lsp::DiagnosticSeverity::ERROR),
1834 message: "unknown variable 'c'".to_string(),
1835 ..Default::default()
1836 }],
1837 },
1838 None,
1839 DiagnosticSourceKind::Pushed,
1840 &[],
1841 cx,
1842 )
1843 .unwrap();
1844 });
1845
1846 let main_ignored_buffer = project
1847 .update(cx, |project, cx| {
1848 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1849 })
1850 .await
1851 .unwrap();
1852 main_ignored_buffer.update(cx, |buffer, _| {
1853 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1854 assert_eq!(
1855 chunks
1856 .iter()
1857 .map(|(s, d)| (s.as_str(), *d))
1858 .collect::<Vec<_>>(),
1859 &[
1860 ("let ", None),
1861 ("b", Some(DiagnosticSeverity::ERROR)),
1862 (" = 2;", None),
1863 ],
1864 "Gigitnored buffers should still get in-buffer diagnostics",
1865 );
1866 });
1867 let other_buffer = project
1868 .update(cx, |project, cx| {
1869 project.open_buffer((other_worktree_id, rel_path("")), cx)
1870 })
1871 .await
1872 .unwrap();
1873 other_buffer.update(cx, |buffer, _| {
1874 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1875 assert_eq!(
1876 chunks
1877 .iter()
1878 .map(|(s, d)| (s.as_str(), *d))
1879 .collect::<Vec<_>>(),
1880 &[
1881 ("let b = ", None),
1882 ("c", Some(DiagnosticSeverity::ERROR)),
1883 (";", None),
1884 ],
1885 "Buffers from hidden projects should still get in-buffer diagnostics"
1886 );
1887 });
1888
1889 project.update(cx, |project, cx| {
1890 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1891 assert_eq!(
1892 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1893 vec![(
1894 ProjectPath {
1895 worktree_id: main_worktree_id,
1896 path: rel_path("b.rs").into(),
1897 },
1898 server_id,
1899 DiagnosticSummary {
1900 error_count: 1,
1901 warning_count: 0,
1902 }
1903 )]
1904 );
1905 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1906 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1907 });
1908}
1909
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    // Verifies the project event sequence emitted around a disk-based
    // diagnostics progress cycle (started → updated → finished), and that
    // publishing empty diagnostics twice only produces one update event.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // This token marks server progress as "disk-based diagnostics".
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress with the disk-based token triggers the
    // DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The pushed diagnostic is visible in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // Second empty publish is a no-op: no further events are pending.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2045
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server while its disk-based
    // diagnostics are still in progress does not leave the project stuck in a
    // "diagnostics running" state: the new server's cycle supersedes the old.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // The old server (id 0) is removed, the replacement gets id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2147
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server clears the diagnostics it
    // had previously published, both in the buffer and in the project summary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The published diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2228
// Regression test: a language server may publish diagnostics tagged with a
// buffer version number the client never produced (e.g. stale state from a
// previous server instance). Restarting the server must not corrupt version
// tracking — the fresh server instance must receive a `didOpen` carrying the
// buffer's real version (0), not the bogus one.
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The restarted server re-opens the buffer; the version it is told about
    // must be the buffer's actual version (0), proving the stale `10000` was
    // discarded rather than adopted.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
2268
// Verifies that cancelling language-server work for a buffer emits a
// `window/workDoneProgress/cancel` notification for the *cancellable*
// progress token only — the concurrently running non-cancellable task
// ("another-token") must be left alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // First task: explicitly NOT cancellable — must not receive a cancel.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before starting the next one
    cx.executor().run_until_parked();

    // Second task: cancellable, using the disk-based-diagnostics token.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    // Ensure progress notification is fully processed before cancelling
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });
    cx.executor().run_until_parked();

    // Exactly the cancellable token should be cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2338
// Toggling the per-language `enable_language_server` setting must stop and
// start exactly the affected server: disabling Rust shuts down only the Rust
// server; re-enabling Rust while disabling JavaScript boots a fresh Rust
// server (which re-opens its buffer) and shuts down the JS one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts that language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an Exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2456
// Diagnostics published for an *older* buffer version must be transformed
// through the edits made since that version, so they land on the text they
// originally referred to. Covers: (1) diagnostics shifting with edits made
// after `didOpen`, (2) overlapping diagnostics, and (3) diagnostics arriving
// with ranges listed out of order.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (The two inserted newlines shift every row by 2.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Chunk iteration reflects the diagnostic highlights over the text.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A sub-range that cuts through diagnostics clips them accordingly.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // Warning whose range fully contains the error above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe (ERROR) wins;
        // the remainder of the warning's range stays WARNING.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    // Stored diagnostics come back sorted by position regardless of the
    // order they were published in.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2748
// Zero-width diagnostic ranges must still be rendered as highlights: an empty
// range mid-line is extended forward over the next character, and an empty
// range at end-of-line is extended backward over the preceding character.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =\n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store:
    // one mid-line (0,10) and one past the end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2826
// Diagnostics from distinct language servers for the same path are tracked
// independently: two servers each reporting one error over the same range
// must yield a summary of two errors, not one replacing the other.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Server 0 reports an error on "one".
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // Server 1 reports a different error on the identical range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors are counted.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2887
// `edits_from_lsp` with an explicit (older) document version must interpret
// the server's edits against that past snapshot and rebase them through the
// local edits made since, so applying them to the *current* buffer produces
// the intended result.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server "computed its edits" against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP edits below use coordinates of the ORIGINAL (pre-edit) text;
    // `edits_from_lsp` must rebase them into current-buffer coordinates.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the rebased edits preserves both the server's changes and the
    // user's interleaved local edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3042
// When a server expresses a tiny change as a huge diff (delete-most,
// reinsert-most — as rust-analyzer does for merge-imports), `edits_from_lsp`
// should collapse it to the minimal set of edits actually needed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The big delete-and-reinsert diff is reduced to two minimal edits:
        // rewriting the import path and deleting the now-duplicate line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3153
// Servers sometimes emit an insertion *after* a replacement at the same
// position (the LSP spec requires insertions to come first). `edits_from_lsp`
// must tolerate this ordering and still produce the intended final text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion lands before the (identity) replacement.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3209
// `edits_from_lsp` must be robust against malformed server output: unsorted
// edits, inverted ranges (end before start), and ranges pointing past the end
// of the document. The result should be the same minimal, well-formed edits a
// correct server would have produced.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is far beyond the document's end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the result is the same two minimal
        // edits as in the well-formed merge-imports case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3316
3317fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3318 buffer: &Buffer,
3319 range: Range<T>,
3320) -> Vec<(String, Option<DiagnosticSeverity>)> {
3321 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3322 for chunk in buffer.snapshot().chunks(range, true) {
3323 if chunks
3324 .last()
3325 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3326 {
3327 chunks.last_mut().unwrap().0.push_str(chunk.text);
3328 } else {
3329 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3330 }
3331 }
3332 chunks
3333}
3334
// Go-to-definition into a file outside the project should (1) reuse the
// existing language server rather than spawning a new one, (2) surface the
// target in an *invisible* worktree, and (3) drop that worktree once the
// last reference to the definition is released.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server resolves the definition to a location in the out-of-project a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs is exposed via an invisible worktree; b.rs stays visible.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree for a.rs.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: (absolute path, is_visible) for every worktree in the project.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3435
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // A completion item that carries an explicit `text_edit` must use that
    // edit's range and new text, taking precedence over both `insert_text`
    // and `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request first; the handler installed below is
    // then awaited to service exactly one request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The edit replaces the trailing "fqn" (last 3 chars).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion must mirror the server-provided text edit.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3519
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Exercises `CompletionList.itemDefaults.editRange` (LSP 3.17): when an
    // item omits `text_edit`, the default edit range is combined with the
    // item's `text_edit_text` if present, or its `label` as the fallback.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` is applied over the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // When a default edit range is present, the `label` — not
        // `insert_text` — is the fallback replacement text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3657
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When neither `text_edit` nor a default edit range is supplied, the
    // replace range must be inferred from the word around the cursor, and the
    // new text comes from `insert_text` or, failing that, the `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` is used; the inferred range covers the trailing "fqn".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Completion is requested just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The label is the fallback text; the inferred range covers "cmp".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3764
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Completion text received from a server may contain `\r` or `\r\n` line
    // endings; they must be normalized to `\n` before insertion.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed `\r` and `\r\n` line endings in the server reply.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both `\r` and `\r\n` are normalized to `\n`.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3833
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Applying a code action whose resolution yields a command (and no edits)
    // must execute that command, and any `workspace/applyEdit` the server
    // sends while the command runs must be captured in the returned
    // project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // The server advertises lazy code-action resolution and one
                // executable command.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The server-initiated edit inserts "X" at the start of
                    // the file.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit must be undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3976
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    // Renaming an entry to a path whose parent directories do not exist yet
    // must create the whole directory hierarchy, preserve file contents, and
    // also work when moving into an already-existing directory.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a three-level-deep directory that doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look up the entry again: the rename gave the file a new entry id.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4084
4085#[gpui::test(iterations = 10)]
4086async fn test_save_file(cx: &mut gpui::TestAppContext) {
4087 init_test(cx);
4088
4089 let fs = FakeFs::new(cx.executor());
4090 fs.insert_tree(
4091 path!("/dir"),
4092 json!({
4093 "file1": "the old contents",
4094 }),
4095 )
4096 .await;
4097
4098 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4099 let buffer = project
4100 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4101 .await
4102 .unwrap();
4103 buffer.update(cx, |buffer, cx| {
4104 assert_eq!(buffer.text(), "the old contents");
4105 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4106 });
4107
4108 project
4109 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4110 .await
4111 .unwrap();
4112
4113 let new_text = fs
4114 .load(Path::new(path!("/dir/file1")))
4115 .await
4116 .unwrap()
4117 .replace("\r\n", "\n");
4118 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4119}
4120
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Saving an untitled buffer to a path with a recognized extension must
    // start the matching language server and open the document in it.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    // The buffer has no file yet, so registering it with language servers is
    // a no-op and no server should be associated with it.
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` path, which assigns the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer is associated with the new server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4200
4201#[gpui::test(iterations = 30)]
4202async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4203 init_test(cx);
4204
4205 let fs = FakeFs::new(cx.executor());
4206 fs.insert_tree(
4207 path!("/dir"),
4208 json!({
4209 "file1": "the original contents",
4210 }),
4211 )
4212 .await;
4213
4214 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4215 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4216 let buffer = project
4217 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4218 .await
4219 .unwrap();
4220
4221 // Change the buffer's file on disk, and then wait for the file change
4222 // to be detected by the worktree, so that the buffer starts reloading.
4223 fs.save(
4224 path!("/dir/file1").as_ref(),
4225 &"the first contents".into(),
4226 Default::default(),
4227 )
4228 .await
4229 .unwrap();
4230 worktree.next_event(cx).await;
4231
4232 // Change the buffer's file again. Depending on the random seed, the
4233 // previous file change may still be in progress.
4234 fs.save(
4235 path!("/dir/file1").as_ref(),
4236 &"the second contents".into(),
4237 Default::default(),
4238 )
4239 .await
4240 .unwrap();
4241 worktree.next_event(cx).await;
4242
4243 cx.executor().run_until_parked();
4244 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4245 buffer.read_with(cx, |buffer, _| {
4246 assert_eq!(buffer.text(), on_disk_text);
4247 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4248 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4249 });
4250}
4251
4252#[gpui::test(iterations = 30)]
4253async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4254 init_test(cx);
4255
4256 let fs = FakeFs::new(cx.executor());
4257 fs.insert_tree(
4258 path!("/dir"),
4259 json!({
4260 "file1": "the original contents",
4261 }),
4262 )
4263 .await;
4264
4265 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4266 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4267 let buffer = project
4268 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4269 .await
4270 .unwrap();
4271
4272 // Change the buffer's file on disk, and then wait for the file change
4273 // to be detected by the worktree, so that the buffer starts reloading.
4274 fs.save(
4275 path!("/dir/file1").as_ref(),
4276 &"the first contents".into(),
4277 Default::default(),
4278 )
4279 .await
4280 .unwrap();
4281 worktree.next_event(cx).await;
4282
4283 cx.executor()
4284 .spawn(cx.executor().simulate_random_delay())
4285 .await;
4286
4287 // Perform a noop edit, causing the buffer's version to increase.
4288 buffer.update(cx, |buffer, cx| {
4289 buffer.edit([(0..0, " ")], None, cx);
4290 buffer.undo(cx);
4291 });
4292
4293 cx.executor().run_until_parked();
4294 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4295 buffer.read_with(cx, |buffer, _| {
4296 let buffer_text = buffer.text();
4297 if buffer_text == on_disk_text {
4298 assert!(
4299 !buffer.is_dirty() && !buffer.has_conflict(),
4300 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4301 );
4302 }
4303 // If the file change occurred while the buffer was processing the first
4304 // change, the buffer will be in a conflicting state.
4305 else {
4306 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4307 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4308 }
4309 });
4310}
4311
4312#[gpui::test]
4313async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4314 init_test(cx);
4315
4316 let fs = FakeFs::new(cx.executor());
4317 fs.insert_tree(
4318 path!("/dir"),
4319 json!({
4320 "file1": "the old contents",
4321 }),
4322 )
4323 .await;
4324
4325 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4326 let buffer = project
4327 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4328 .await
4329 .unwrap();
4330 buffer.update(cx, |buffer, cx| {
4331 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4332 });
4333
4334 project
4335 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4336 .await
4337 .unwrap();
4338
4339 let new_text = fs
4340 .load(Path::new(path!("/dir/file1")))
4341 .await
4342 .unwrap()
4343 .replace("\r\n", "\n");
4344 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4345}
4346
4347#[gpui::test]
4348async fn test_save_as(cx: &mut gpui::TestAppContext) {
4349 init_test(cx);
4350
4351 let fs = FakeFs::new(cx.executor());
4352 fs.insert_tree("/dir", json!({})).await;
4353
4354 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4355
4356 let languages = project.update(cx, |project, _| project.languages().clone());
4357 languages.add(rust_lang());
4358
4359 let buffer = project.update(cx, |project, cx| {
4360 project.create_local_buffer("", None, false, cx)
4361 });
4362 buffer.update(cx, |buffer, cx| {
4363 buffer.edit([(0..0, "abc")], None, cx);
4364 assert!(buffer.is_dirty());
4365 assert!(!buffer.has_conflict());
4366 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4367 });
4368 project
4369 .update(cx, |project, cx| {
4370 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4371 let path = ProjectPath {
4372 worktree_id,
4373 path: rel_path("file1.rs").into(),
4374 };
4375 project.save_buffer_as(buffer.clone(), path, cx)
4376 })
4377 .await
4378 .unwrap();
4379 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4380
4381 cx.executor().run_until_parked();
4382 buffer.update(cx, |buffer, cx| {
4383 assert_eq!(
4384 buffer.file().unwrap().full_path(cx),
4385 Path::new("dir/file1.rs")
4386 );
4387 assert!(!buffer.is_dirty());
4388 assert!(!buffer.has_conflict());
4389 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4390 });
4391
4392 let opened_buffer = project
4393 .update(cx, |project, cx| {
4394 project.open_local_buffer("/dir/file1.rs", cx)
4395 })
4396 .await
4397 .unwrap();
4398 assert_eq!(opened_buffer, buffer);
4399}
4400
4401#[gpui::test]
4402async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4403 init_test(cx);
4404
4405 let fs = FakeFs::new(cx.executor());
4406 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4407
4408 fs.insert_tree(
4409 path!("/dir"),
4410 json!({
4411 "data_a.txt": "data about a"
4412 }),
4413 )
4414 .await;
4415
4416 let buffer = project
4417 .update(cx, |project, cx| {
4418 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4419 })
4420 .await
4421 .unwrap();
4422
4423 buffer.update(cx, |buffer, cx| {
4424 buffer.edit([(11..12, "b")], None, cx);
4425 });
4426
4427 // Save buffer's contents as a new file and confirm that the buffer's now
4428 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4429 // file associated with the buffer has now been updated to `data_b.txt`
4430 project
4431 .update(cx, |project, cx| {
4432 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4433 let new_path = ProjectPath {
4434 worktree_id,
4435 path: rel_path("data_b.txt").into(),
4436 };
4437
4438 project.save_buffer_as(buffer.clone(), new_path, cx)
4439 })
4440 .await
4441 .unwrap();
4442
4443 buffer.update(cx, |buffer, cx| {
4444 assert_eq!(
4445 buffer.file().unwrap().full_path(cx),
4446 Path::new("dir/data_b.txt")
4447 )
4448 });
4449
4450 // Open the original `data_a.txt` file, confirming that its contents are
4451 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4452 let original_buffer = project
4453 .update(cx, |project, cx| {
4454 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4455 })
4456 .await
4457 .unwrap();
4458
4459 original_buffer.update(cx, |buffer, cx| {
4460 assert_eq!(buffer.text(), "data about a");
4461 assert_eq!(
4462 buffer.file().unwrap().full_path(cx),
4463 Path::new("dir/data_a.txt")
4464 )
4465 });
4466}
4467
4468#[gpui::test(retries = 5)]
4469async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4470 use worktree::WorktreeModelHandle as _;
4471
4472 init_test(cx);
4473 cx.executor().allow_parking();
4474
4475 let dir = TempTree::new(json!({
4476 "a": {
4477 "file1": "",
4478 "file2": "",
4479 "file3": "",
4480 },
4481 "b": {
4482 "c": {
4483 "file4": "",
4484 "file5": "",
4485 }
4486 }
4487 }));
4488
4489 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
4490
4491 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4492 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
4493 async move { buffer.await.unwrap() }
4494 };
4495 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4496 project.update(cx, |project, cx| {
4497 let tree = project.worktrees(cx).next().unwrap();
4498 tree.read(cx)
4499 .entry_for_path(rel_path(path))
4500 .unwrap_or_else(|| panic!("no entry for path {}", path))
4501 .id
4502 })
4503 };
4504
4505 let buffer2 = buffer_for_path("a/file2", cx).await;
4506 let buffer3 = buffer_for_path("a/file3", cx).await;
4507 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4508 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4509
4510 let file2_id = id_for_path("a/file2", cx);
4511 let file3_id = id_for_path("a/file3", cx);
4512 let file4_id = id_for_path("b/c/file4", cx);
4513
4514 // Create a remote copy of this worktree.
4515 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4516 let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
4517
4518 let updates = Arc::new(Mutex::new(Vec::new()));
4519 tree.update(cx, |tree, cx| {
4520 let updates = updates.clone();
4521 tree.observe_updates(0, cx, move |update| {
4522 updates.lock().push(update);
4523 async { true }
4524 });
4525 });
4526
4527 let remote = cx.update(|cx| {
4528 Worktree::remote(
4529 0,
4530 ReplicaId::REMOTE_SERVER,
4531 metadata,
4532 project.read(cx).client().into(),
4533 project.read(cx).path_style(cx),
4534 cx,
4535 )
4536 });
4537
4538 cx.executor().run_until_parked();
4539
4540 cx.update(|cx| {
4541 assert!(!buffer2.read(cx).is_dirty());
4542 assert!(!buffer3.read(cx).is_dirty());
4543 assert!(!buffer4.read(cx).is_dirty());
4544 assert!(!buffer5.read(cx).is_dirty());
4545 });
4546
4547 // Rename and delete files and directories.
4548 tree.flush_fs_events(cx).await;
4549 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4550 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4551 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4552 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4553 tree.flush_fs_events(cx).await;
4554
4555 cx.update(|app| {
4556 assert_eq!(
4557 tree.read(app).paths().collect::<Vec<_>>(),
4558 vec![
4559 rel_path("a"),
4560 rel_path("a/file1"),
4561 rel_path("a/file2.new"),
4562 rel_path("b"),
4563 rel_path("d"),
4564 rel_path("d/file3"),
4565 rel_path("d/file4"),
4566 ]
4567 );
4568 });
4569
4570 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
4571 assert_eq!(id_for_path("d/file3", cx), file3_id);
4572 assert_eq!(id_for_path("d/file4", cx), file4_id);
4573
4574 cx.update(|cx| {
4575 assert_eq!(
4576 buffer2.read(cx).file().unwrap().path().as_ref(),
4577 rel_path("a/file2.new")
4578 );
4579 assert_eq!(
4580 buffer3.read(cx).file().unwrap().path().as_ref(),
4581 rel_path("d/file3")
4582 );
4583 assert_eq!(
4584 buffer4.read(cx).file().unwrap().path().as_ref(),
4585 rel_path("d/file4")
4586 );
4587 assert_eq!(
4588 buffer5.read(cx).file().unwrap().path().as_ref(),
4589 rel_path("b/c/file5")
4590 );
4591
4592 assert_matches!(
4593 buffer2.read(cx).file().unwrap().disk_state(),
4594 DiskState::Present { .. }
4595 );
4596 assert_matches!(
4597 buffer3.read(cx).file().unwrap().disk_state(),
4598 DiskState::Present { .. }
4599 );
4600 assert_matches!(
4601 buffer4.read(cx).file().unwrap().disk_state(),
4602 DiskState::Present { .. }
4603 );
4604 assert_eq!(
4605 buffer5.read(cx).file().unwrap().disk_state(),
4606 DiskState::Deleted
4607 );
4608 });
4609
4610 // Update the remote worktree. Check that it becomes consistent with the
4611 // local worktree.
4612 cx.executor().run_until_parked();
4613
4614 remote.update(cx, |remote, _| {
4615 for update in updates.lock().drain(..) {
4616 remote.as_remote_mut().unwrap().update_from_remote(update);
4617 }
4618 });
4619 cx.executor().run_until_parked();
4620 remote.update(cx, |remote, _| {
4621 assert_eq!(
4622 remote.paths().collect::<Vec<_>>(),
4623 vec![
4624 rel_path("a"),
4625 rel_path("a/file1"),
4626 rel_path("a/file2.new"),
4627 rel_path("b"),
4628 rel_path("d"),
4629 rel_path("d/file3"),
4630 rel_path("d/file4"),
4631 ]
4632 );
4633 });
4634}
4635
4636#[gpui::test(iterations = 10)]
4637async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4638 init_test(cx);
4639
4640 let fs = FakeFs::new(cx.executor());
4641 fs.insert_tree(
4642 path!("/dir"),
4643 json!({
4644 "a": {
4645 "file1": "",
4646 }
4647 }),
4648 )
4649 .await;
4650
4651 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4652 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4653 let tree_id = tree.update(cx, |tree, _| tree.id());
4654
4655 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4656 project.update(cx, |project, cx| {
4657 let tree = project.worktrees(cx).next().unwrap();
4658 tree.read(cx)
4659 .entry_for_path(rel_path(path))
4660 .unwrap_or_else(|| panic!("no entry for path {}", path))
4661 .id
4662 })
4663 };
4664
4665 let dir_id = id_for_path("a", cx);
4666 let file_id = id_for_path("a/file1", cx);
4667 let buffer = project
4668 .update(cx, |p, cx| {
4669 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4670 })
4671 .await
4672 .unwrap();
4673 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4674
4675 project
4676 .update(cx, |project, cx| {
4677 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4678 })
4679 .unwrap()
4680 .await
4681 .into_included()
4682 .unwrap();
4683 cx.executor().run_until_parked();
4684
4685 assert_eq!(id_for_path("b", cx), dir_id);
4686 assert_eq!(id_for_path("b/file1", cx), file_id);
4687 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4688}
4689
4690#[gpui::test]
4691async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4692 init_test(cx);
4693
4694 let fs = FakeFs::new(cx.executor());
4695 fs.insert_tree(
4696 "/dir",
4697 json!({
4698 "a.txt": "a-contents",
4699 "b.txt": "b-contents",
4700 }),
4701 )
4702 .await;
4703
4704 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4705
4706 // Spawn multiple tasks to open paths, repeating some paths.
4707 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4708 (
4709 p.open_local_buffer("/dir/a.txt", cx),
4710 p.open_local_buffer("/dir/b.txt", cx),
4711 p.open_local_buffer("/dir/a.txt", cx),
4712 )
4713 });
4714
4715 let buffer_a_1 = buffer_a_1.await.unwrap();
4716 let buffer_a_2 = buffer_a_2.await.unwrap();
4717 let buffer_b = buffer_b.await.unwrap();
4718 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4719 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4720
4721 // There is only one buffer per path.
4722 let buffer_a_id = buffer_a_1.entity_id();
4723 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4724
4725 // Open the same path again while it is still open.
4726 drop(buffer_a_1);
4727 let buffer_a_3 = project
4728 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4729 .await
4730 .unwrap();
4731
4732 // There's still only one buffer per path.
4733 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4734}
4735
4736#[gpui::test]
4737async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4738 init_test(cx);
4739
4740 let fs = FakeFs::new(cx.executor());
4741 fs.insert_tree(
4742 path!("/dir"),
4743 json!({
4744 "file1": "abc",
4745 "file2": "def",
4746 "file3": "ghi",
4747 }),
4748 )
4749 .await;
4750
4751 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4752
4753 let buffer1 = project
4754 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4755 .await
4756 .unwrap();
4757 let events = Arc::new(Mutex::new(Vec::new()));
4758
4759 // initially, the buffer isn't dirty.
4760 buffer1.update(cx, |buffer, cx| {
4761 cx.subscribe(&buffer1, {
4762 let events = events.clone();
4763 move |_, _, event, _| match event {
4764 BufferEvent::Operation { .. } => {}
4765 _ => events.lock().push(event.clone()),
4766 }
4767 })
4768 .detach();
4769
4770 assert!(!buffer.is_dirty());
4771 assert!(events.lock().is_empty());
4772
4773 buffer.edit([(1..2, "")], None, cx);
4774 });
4775
4776 // after the first edit, the buffer is dirty, and emits a dirtied event.
4777 buffer1.update(cx, |buffer, cx| {
4778 assert!(buffer.text() == "ac");
4779 assert!(buffer.is_dirty());
4780 assert_eq!(
4781 *events.lock(),
4782 &[
4783 language::BufferEvent::Edited,
4784 language::BufferEvent::DirtyChanged
4785 ]
4786 );
4787 events.lock().clear();
4788 buffer.did_save(
4789 buffer.version(),
4790 buffer.file().unwrap().disk_state().mtime(),
4791 cx,
4792 );
4793 });
4794
4795 // after saving, the buffer is not dirty, and emits a saved event.
4796 buffer1.update(cx, |buffer, cx| {
4797 assert!(!buffer.is_dirty());
4798 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4799 events.lock().clear();
4800
4801 buffer.edit([(1..1, "B")], None, cx);
4802 buffer.edit([(2..2, "D")], None, cx);
4803 });
4804
4805 // after editing again, the buffer is dirty, and emits another dirty event.
4806 buffer1.update(cx, |buffer, cx| {
4807 assert!(buffer.text() == "aBDc");
4808 assert!(buffer.is_dirty());
4809 assert_eq!(
4810 *events.lock(),
4811 &[
4812 language::BufferEvent::Edited,
4813 language::BufferEvent::DirtyChanged,
4814 language::BufferEvent::Edited,
4815 ],
4816 );
4817 events.lock().clear();
4818
4819 // After restoring the buffer to its previously-saved state,
4820 // the buffer is not considered dirty anymore.
4821 buffer.edit([(1..3, "")], None, cx);
4822 assert!(buffer.text() == "ac");
4823 assert!(!buffer.is_dirty());
4824 });
4825
4826 assert_eq!(
4827 *events.lock(),
4828 &[
4829 language::BufferEvent::Edited,
4830 language::BufferEvent::DirtyChanged
4831 ]
4832 );
4833
4834 // When a file is deleted, it is not considered dirty.
4835 let events = Arc::new(Mutex::new(Vec::new()));
4836 let buffer2 = project
4837 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4838 .await
4839 .unwrap();
4840 buffer2.update(cx, |_, cx| {
4841 cx.subscribe(&buffer2, {
4842 let events = events.clone();
4843 move |_, _, event, _| match event {
4844 BufferEvent::Operation { .. } => {}
4845 _ => events.lock().push(event.clone()),
4846 }
4847 })
4848 .detach();
4849 });
4850
4851 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4852 .await
4853 .unwrap();
4854 cx.executor().run_until_parked();
4855 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4856 assert_eq!(
4857 mem::take(&mut *events.lock()),
4858 &[language::BufferEvent::FileHandleChanged]
4859 );
4860
4861 // Buffer becomes dirty when edited.
4862 buffer2.update(cx, |buffer, cx| {
4863 buffer.edit([(2..3, "")], None, cx);
4864 assert_eq!(buffer.is_dirty(), true);
4865 });
4866 assert_eq!(
4867 mem::take(&mut *events.lock()),
4868 &[
4869 language::BufferEvent::Edited,
4870 language::BufferEvent::DirtyChanged
4871 ]
4872 );
4873
4874 // Buffer becomes clean again when all of its content is removed, because
4875 // the file was deleted.
4876 buffer2.update(cx, |buffer, cx| {
4877 buffer.edit([(0..2, "")], None, cx);
4878 assert_eq!(buffer.is_empty(), true);
4879 assert_eq!(buffer.is_dirty(), false);
4880 });
4881 assert_eq!(
4882 *events.lock(),
4883 &[
4884 language::BufferEvent::Edited,
4885 language::BufferEvent::DirtyChanged
4886 ]
4887 );
4888
4889 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4890 let events = Arc::new(Mutex::new(Vec::new()));
4891 let buffer3 = project
4892 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4893 .await
4894 .unwrap();
4895 buffer3.update(cx, |_, cx| {
4896 cx.subscribe(&buffer3, {
4897 let events = events.clone();
4898 move |_, _, event, _| match event {
4899 BufferEvent::Operation { .. } => {}
4900 _ => events.lock().push(event.clone()),
4901 }
4902 })
4903 .detach();
4904 });
4905
4906 buffer3.update(cx, |buffer, cx| {
4907 buffer.edit([(0..0, "x")], None, cx);
4908 });
4909 events.lock().clear();
4910 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4911 .await
4912 .unwrap();
4913 cx.executor().run_until_parked();
4914 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4915 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4916}
4917
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its backing file changing on disk:
    // a clean buffer reloads (preserving anchors via a diff-based edit),
    // while a dirty buffer keeps its contents and is flagged as conflicted.
    init_test(cx);

    // The 'ˇ' markers define offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at the marked offsets so we can check that they survive
    // the on-disk reload.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors should now land at the corresponding marked offsets in
        // the new text, since the reload was applied as a minimal diff.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
5000
5001#[gpui::test]
5002async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
5003 init_test(cx);
5004
5005 let fs = FakeFs::new(cx.executor());
5006 fs.insert_tree(
5007 path!("/dir"),
5008 json!({
5009 "file1": "a\nb\nc\n",
5010 "file2": "one\r\ntwo\r\nthree\r\n",
5011 }),
5012 )
5013 .await;
5014
5015 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5016 let buffer1 = project
5017 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5018 .await
5019 .unwrap();
5020 let buffer2 = project
5021 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5022 .await
5023 .unwrap();
5024
5025 buffer1.update(cx, |buffer, _| {
5026 assert_eq!(buffer.text(), "a\nb\nc\n");
5027 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5028 });
5029 buffer2.update(cx, |buffer, _| {
5030 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5031 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5032 });
5033
5034 // Change a file's line endings on disk from unix to windows. The buffer's
5035 // state updates correctly.
5036 fs.save(
5037 path!("/dir/file1").as_ref(),
5038 &"aaa\nb\nc\n".into(),
5039 LineEnding::Windows,
5040 )
5041 .await
5042 .unwrap();
5043 cx.executor().run_until_parked();
5044 buffer1.update(cx, |buffer, _| {
5045 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5046 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5047 });
5048
5049 // Save a file with windows line endings. The file is written correctly.
5050 buffer2.update(cx, |buffer, cx| {
5051 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5052 });
5053 project
5054 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5055 .await
5056 .unwrap();
5057 assert_eq!(
5058 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5059 "one\r\ntwo\r\nthree\r\nfour\r\n",
5060 );
5061}
5062
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics whose `related_information` links
    // a primary diagnostic to its hints get grouped: the primary entry and
    // its hints share a `group_id`, and `diagnostic_group` returns each
    // group's entries in buffer order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two logical groups:
    // - "error 1" (warning) with one hint, and
    // - "error 2" (error) with two hints.
    // Primaries reference their hints (and hints reference back the primary,
    // with the message "original diagnostic") via `related_information`.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the diagnostics into the LSP store as if a server pushed them.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, in buffer order: "error 2"'s group is group 0 and
    // "error 1"'s group is group 1; only the primaries have
    // `is_primary: true`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 contains "error 2" and its two hints, in buffer order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 contains "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5322
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry sends the LSP file-operation
    // messages to a server that registered for them: a `willRenameFiles`
    // request (whose returned workspace edit is applied) followed by a
    // `didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers for: Rust files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    // Fake server advertising both willRename and didRename capabilities.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of `one.rs` -> `three.rs`; the task completes only
    // after the server round-trips below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the server will answer `willRenameFiles` with.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Answer the willRenameFiles request, checking the old/new URIs and
    // recording that the edit was actually resolved.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with
    // the same URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5459
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Verifies symbol rename via LSP: `prepare_rename` resolves the symbol's
    // range, and `perform_rename` applies a multi-file workspace edit,
    // returning a transaction covering every edited buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Fake server advertising rename support with prepare-rename enabled.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Prepare a rename at offset 7, which is inside "ONE".
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The server's LSP range maps back to buffer offsets 6..9 ("ONE").
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server answers with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both edited buffers — the one we opened and the
    // one the workspace edit pulled in.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5600
5601#[gpui::test]
5602async fn test_search(cx: &mut gpui::TestAppContext) {
5603 init_test(cx);
5604
5605 let fs = FakeFs::new(cx.executor());
5606 fs.insert_tree(
5607 path!("/dir"),
5608 json!({
5609 "one.rs": "const ONE: usize = 1;",
5610 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5611 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5612 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5613 }),
5614 )
5615 .await;
5616 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5617 assert_eq!(
5618 search(
5619 &project,
5620 SearchQuery::text(
5621 "TWO",
5622 false,
5623 true,
5624 false,
5625 Default::default(),
5626 Default::default(),
5627 false,
5628 None
5629 )
5630 .unwrap(),
5631 cx
5632 )
5633 .await
5634 .unwrap(),
5635 HashMap::from_iter([
5636 (path!("dir/two.rs").to_string(), vec![6..9]),
5637 (path!("dir/three.rs").to_string(), vec![37..40])
5638 ])
5639 );
5640
5641 let buffer_4 = project
5642 .update(cx, |project, cx| {
5643 project.open_local_buffer(path!("/dir/four.rs"), cx)
5644 })
5645 .await
5646 .unwrap();
5647 buffer_4.update(cx, |buffer, cx| {
5648 let text = "two::TWO";
5649 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5650 });
5651
5652 assert_eq!(
5653 search(
5654 &project,
5655 SearchQuery::text(
5656 "TWO",
5657 false,
5658 true,
5659 false,
5660 Default::default(),
5661 Default::default(),
5662 false,
5663 None,
5664 )
5665 .unwrap(),
5666 cx
5667 )
5668 .await
5669 .unwrap(),
5670 HashMap::from_iter([
5671 (path!("dir/two.rs").to_string(), vec![6..9]),
5672 (path!("dir/three.rs").to_string(), vec![37..40]),
5673 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5674 ])
5675 );
5676}
5677
5678#[gpui::test]
5679async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5680 init_test(cx);
5681
5682 let search_query = "file";
5683
5684 let fs = FakeFs::new(cx.executor());
5685 fs.insert_tree(
5686 path!("/dir"),
5687 json!({
5688 "one.rs": r#"// Rust file one"#,
5689 "one.ts": r#"// TypeScript file one"#,
5690 "two.rs": r#"// Rust file two"#,
5691 "two.ts": r#"// TypeScript file two"#,
5692 }),
5693 )
5694 .await;
5695 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5696
5697 assert!(
5698 search(
5699 &project,
5700 SearchQuery::text(
5701 search_query,
5702 false,
5703 true,
5704 false,
5705 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5706 Default::default(),
5707 false,
5708 None
5709 )
5710 .unwrap(),
5711 cx
5712 )
5713 .await
5714 .unwrap()
5715 .is_empty(),
5716 "If no inclusions match, no files should be returned"
5717 );
5718
5719 assert_eq!(
5720 search(
5721 &project,
5722 SearchQuery::text(
5723 search_query,
5724 false,
5725 true,
5726 false,
5727 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5728 Default::default(),
5729 false,
5730 None
5731 )
5732 .unwrap(),
5733 cx
5734 )
5735 .await
5736 .unwrap(),
5737 HashMap::from_iter([
5738 (path!("dir/one.rs").to_string(), vec![8..12]),
5739 (path!("dir/two.rs").to_string(), vec![8..12]),
5740 ]),
5741 "Rust only search should give only Rust files"
5742 );
5743
5744 assert_eq!(
5745 search(
5746 &project,
5747 SearchQuery::text(
5748 search_query,
5749 false,
5750 true,
5751 false,
5752 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5753 .unwrap(),
5754 Default::default(),
5755 false,
5756 None,
5757 )
5758 .unwrap(),
5759 cx
5760 )
5761 .await
5762 .unwrap(),
5763 HashMap::from_iter([
5764 (path!("dir/one.ts").to_string(), vec![14..18]),
5765 (path!("dir/two.ts").to_string(), vec![14..18]),
5766 ]),
5767 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5768 );
5769
5770 assert_eq!(
5771 search(
5772 &project,
5773 SearchQuery::text(
5774 search_query,
5775 false,
5776 true,
5777 false,
5778 PathMatcher::new(
5779 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5780 PathStyle::local()
5781 )
5782 .unwrap(),
5783 Default::default(),
5784 false,
5785 None,
5786 )
5787 .unwrap(),
5788 cx
5789 )
5790 .await
5791 .unwrap(),
5792 HashMap::from_iter([
5793 (path!("dir/two.ts").to_string(), vec![14..18]),
5794 (path!("dir/one.rs").to_string(), vec![8..12]),
5795 (path!("dir/one.ts").to_string(), vec![14..18]),
5796 (path!("dir/two.rs").to_string(), vec![8..12]),
5797 ]),
5798 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5799 );
5800}
5801
5802#[gpui::test]
5803async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5804 init_test(cx);
5805
5806 let search_query = "file";
5807
5808 let fs = FakeFs::new(cx.executor());
5809 fs.insert_tree(
5810 path!("/dir"),
5811 json!({
5812 "one.rs": r#"// Rust file one"#,
5813 "one.ts": r#"// TypeScript file one"#,
5814 "two.rs": r#"// Rust file two"#,
5815 "two.ts": r#"// TypeScript file two"#,
5816 }),
5817 )
5818 .await;
5819 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5820
5821 assert_eq!(
5822 search(
5823 &project,
5824 SearchQuery::text(
5825 search_query,
5826 false,
5827 true,
5828 false,
5829 Default::default(),
5830 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5831 false,
5832 None,
5833 )
5834 .unwrap(),
5835 cx
5836 )
5837 .await
5838 .unwrap(),
5839 HashMap::from_iter([
5840 (path!("dir/one.rs").to_string(), vec![8..12]),
5841 (path!("dir/one.ts").to_string(), vec![14..18]),
5842 (path!("dir/two.rs").to_string(), vec![8..12]),
5843 (path!("dir/two.ts").to_string(), vec![14..18]),
5844 ]),
5845 "If no exclusions match, all files should be returned"
5846 );
5847
5848 assert_eq!(
5849 search(
5850 &project,
5851 SearchQuery::text(
5852 search_query,
5853 false,
5854 true,
5855 false,
5856 Default::default(),
5857 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5858 false,
5859 None,
5860 )
5861 .unwrap(),
5862 cx
5863 )
5864 .await
5865 .unwrap(),
5866 HashMap::from_iter([
5867 (path!("dir/one.ts").to_string(), vec![14..18]),
5868 (path!("dir/two.ts").to_string(), vec![14..18]),
5869 ]),
5870 "Rust exclusion search should give only TypeScript files"
5871 );
5872
5873 assert_eq!(
5874 search(
5875 &project,
5876 SearchQuery::text(
5877 search_query,
5878 false,
5879 true,
5880 false,
5881 Default::default(),
5882 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5883 .unwrap(),
5884 false,
5885 None,
5886 )
5887 .unwrap(),
5888 cx
5889 )
5890 .await
5891 .unwrap(),
5892 HashMap::from_iter([
5893 (path!("dir/one.rs").to_string(), vec![8..12]),
5894 (path!("dir/two.rs").to_string(), vec![8..12]),
5895 ]),
5896 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5897 );
5898
5899 assert!(
5900 search(
5901 &project,
5902 SearchQuery::text(
5903 search_query,
5904 false,
5905 true,
5906 false,
5907 Default::default(),
5908 PathMatcher::new(
5909 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5910 PathStyle::local(),
5911 )
5912 .unwrap(),
5913 false,
5914 None,
5915 )
5916 .unwrap(),
5917 cx
5918 )
5919 .await
5920 .unwrap()
5921 .is_empty(),
5922 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5923 );
5924}
5925
5926#[gpui::test]
5927async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5928 init_test(cx);
5929
5930 let search_query = "file";
5931
5932 let fs = FakeFs::new(cx.executor());
5933 fs.insert_tree(
5934 path!("/dir"),
5935 json!({
5936 "one.rs": r#"// Rust file one"#,
5937 "one.ts": r#"// TypeScript file one"#,
5938 "two.rs": r#"// Rust file two"#,
5939 "two.ts": r#"// TypeScript file two"#,
5940 }),
5941 )
5942 .await;
5943
5944 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5945 let path_style = PathStyle::local();
5946 let _buffer = project.update(cx, |project, cx| {
5947 project.create_local_buffer("file", None, false, cx)
5948 });
5949
5950 assert_eq!(
5951 search(
5952 &project,
5953 SearchQuery::text(
5954 search_query,
5955 false,
5956 true,
5957 false,
5958 Default::default(),
5959 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5960 false,
5961 None,
5962 )
5963 .unwrap(),
5964 cx
5965 )
5966 .await
5967 .unwrap(),
5968 HashMap::from_iter([
5969 (path!("dir/one.rs").to_string(), vec![8..12]),
5970 (path!("dir/one.ts").to_string(), vec![14..18]),
5971 (path!("dir/two.rs").to_string(), vec![8..12]),
5972 (path!("dir/two.ts").to_string(), vec![14..18]),
5973 ]),
5974 "If no exclusions match, all files should be returned"
5975 );
5976
5977 assert_eq!(
5978 search(
5979 &project,
5980 SearchQuery::text(
5981 search_query,
5982 false,
5983 true,
5984 false,
5985 Default::default(),
5986 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5987 false,
5988 None,
5989 )
5990 .unwrap(),
5991 cx
5992 )
5993 .await
5994 .unwrap(),
5995 HashMap::from_iter([
5996 (path!("dir/one.ts").to_string(), vec![14..18]),
5997 (path!("dir/two.ts").to_string(), vec![14..18]),
5998 ]),
5999 "Rust exclusion search should give only TypeScript files"
6000 );
6001
6002 assert_eq!(
6003 search(
6004 &project,
6005 SearchQuery::text(
6006 search_query,
6007 false,
6008 true,
6009 false,
6010 Default::default(),
6011 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6012 false,
6013 None,
6014 )
6015 .unwrap(),
6016 cx
6017 )
6018 .await
6019 .unwrap(),
6020 HashMap::from_iter([
6021 (path!("dir/one.rs").to_string(), vec![8..12]),
6022 (path!("dir/two.rs").to_string(), vec![8..12]),
6023 ]),
6024 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6025 );
6026
6027 assert!(
6028 search(
6029 &project,
6030 SearchQuery::text(
6031 search_query,
6032 false,
6033 true,
6034 false,
6035 Default::default(),
6036 PathMatcher::new(
6037 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6038 PathStyle::local(),
6039 )
6040 .unwrap(),
6041 false,
6042 None,
6043 )
6044 .unwrap(),
6045 cx
6046 )
6047 .await
6048 .unwrap()
6049 .is_empty(),
6050 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6051 );
6052}
6053
6054#[gpui::test]
6055async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6056 init_test(cx);
6057
6058 let search_query = "file";
6059
6060 let fs = FakeFs::new(cx.executor());
6061 fs.insert_tree(
6062 path!("/dir"),
6063 json!({
6064 "one.rs": r#"// Rust file one"#,
6065 "one.ts": r#"// TypeScript file one"#,
6066 "two.rs": r#"// Rust file two"#,
6067 "two.ts": r#"// TypeScript file two"#,
6068 }),
6069 )
6070 .await;
6071 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6072 assert!(
6073 search(
6074 &project,
6075 SearchQuery::text(
6076 search_query,
6077 false,
6078 true,
6079 false,
6080 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6081 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6082 false,
6083 None,
6084 )
6085 .unwrap(),
6086 cx
6087 )
6088 .await
6089 .unwrap()
6090 .is_empty(),
6091 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6092 );
6093
6094 assert!(
6095 search(
6096 &project,
6097 SearchQuery::text(
6098 search_query,
6099 false,
6100 true,
6101 false,
6102 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6103 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6104 false,
6105 None,
6106 )
6107 .unwrap(),
6108 cx
6109 )
6110 .await
6111 .unwrap()
6112 .is_empty(),
6113 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6114 );
6115
6116 assert!(
6117 search(
6118 &project,
6119 SearchQuery::text(
6120 search_query,
6121 false,
6122 true,
6123 false,
6124 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6125 .unwrap(),
6126 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6127 .unwrap(),
6128 false,
6129 None,
6130 )
6131 .unwrap(),
6132 cx
6133 )
6134 .await
6135 .unwrap()
6136 .is_empty(),
6137 "Non-matching inclusions and exclusions should not change that."
6138 );
6139
6140 assert_eq!(
6141 search(
6142 &project,
6143 SearchQuery::text(
6144 search_query,
6145 false,
6146 true,
6147 false,
6148 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6149 .unwrap(),
6150 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6151 .unwrap(),
6152 false,
6153 None,
6154 )
6155 .unwrap(),
6156 cx
6157 )
6158 .await
6159 .unwrap(),
6160 HashMap::from_iter([
6161 (path!("dir/one.ts").to_string(), vec![14..18]),
6162 (path!("dir/two.ts").to_string(), vec![14..18]),
6163 ]),
6164 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6165 );
6166}
6167
6168#[gpui::test]
6169async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6170 init_test(cx);
6171
6172 let fs = FakeFs::new(cx.executor());
6173 fs.insert_tree(
6174 path!("/worktree-a"),
6175 json!({
6176 "haystack.rs": r#"// NEEDLE"#,
6177 "haystack.ts": r#"// NEEDLE"#,
6178 }),
6179 )
6180 .await;
6181 fs.insert_tree(
6182 path!("/worktree-b"),
6183 json!({
6184 "haystack.rs": r#"// NEEDLE"#,
6185 "haystack.ts": r#"// NEEDLE"#,
6186 }),
6187 )
6188 .await;
6189
6190 let path_style = PathStyle::local();
6191 let project = Project::test(
6192 fs.clone(),
6193 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6194 cx,
6195 )
6196 .await;
6197
6198 assert_eq!(
6199 search(
6200 &project,
6201 SearchQuery::text(
6202 "NEEDLE",
6203 false,
6204 true,
6205 false,
6206 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6207 Default::default(),
6208 true,
6209 None,
6210 )
6211 .unwrap(),
6212 cx
6213 )
6214 .await
6215 .unwrap(),
6216 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6217 "should only return results from included worktree"
6218 );
6219 assert_eq!(
6220 search(
6221 &project,
6222 SearchQuery::text(
6223 "NEEDLE",
6224 false,
6225 true,
6226 false,
6227 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6228 Default::default(),
6229 true,
6230 None,
6231 )
6232 .unwrap(),
6233 cx
6234 )
6235 .await
6236 .unwrap(),
6237 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6238 "should only return results from included worktree"
6239 );
6240
6241 assert_eq!(
6242 search(
6243 &project,
6244 SearchQuery::text(
6245 "NEEDLE",
6246 false,
6247 true,
6248 false,
6249 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6250 Default::default(),
6251 false,
6252 None,
6253 )
6254 .unwrap(),
6255 cx
6256 )
6257 .await
6258 .unwrap(),
6259 HashMap::from_iter([
6260 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6261 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6262 ]),
6263 "should return results from both worktrees"
6264 );
6265}
6266
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Searches skip gitignored entries by default; the `include_ignored`
    // flag (4th argument of `SearchQuery::text`) opts them back in.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search (include_ignored = false): only the non-ignored
    // top-level package.json contains "key".
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created for each subsequent query — presumably to
    // avoid reusing scan state from the previous search (NOTE(review):
    // confirm whether this is strictly required).
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // include_ignored = true: every file under target/ and node_modules/
    // is searched too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion and exclusion matchers still apply to ignored entries:
    // include only node_modules/prettier/**, then drop *.ts from it.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6391
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Each Cyrillic letter is 2 bytes in UTF-8, so the expected match ranges
    // below are byte offsets: "привет" spans 12 bytes (e.g. 3..15, 17..29).
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // A case-sensitive search stays a plain `Text` query (asserted below)
    // and only matches the lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive search over non-ASCII text is converted into a
    // `Regex` query (asserted below) and matches both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." must be treated literally, not as a regex wildcard:
    // only two.rs, which actually ends in a period, matches (12 + 1 = 13
    // bytes, i.e. 3..16); "привет!" in one.rs does not.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6474
6475#[gpui::test]
6476async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6477 init_test(cx);
6478
6479 let fs = FakeFs::new(cx.executor());
6480 fs.insert_tree(
6481 "/one/two",
6482 json!({
6483 "three": {
6484 "a.txt": "",
6485 "four": {}
6486 },
6487 "c.rs": ""
6488 }),
6489 )
6490 .await;
6491
6492 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6493 project
6494 .update(cx, |project, cx| {
6495 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6496 project.create_entry((id, rel_path("b..")), true, cx)
6497 })
6498 .await
6499 .unwrap()
6500 .into_included()
6501 .unwrap();
6502
6503 assert_eq!(
6504 fs.paths(true),
6505 vec![
6506 PathBuf::from(path!("/")),
6507 PathBuf::from(path!("/one")),
6508 PathBuf::from(path!("/one/two")),
6509 PathBuf::from(path!("/one/two/c.rs")),
6510 PathBuf::from(path!("/one/two/three")),
6511 PathBuf::from(path!("/one/two/three/a.txt")),
6512 PathBuf::from(path!("/one/two/three/b..")),
6513 PathBuf::from(path!("/one/two/three/four")),
6514 ]
6515 );
6516}
6517
6518#[gpui::test]
6519async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6520 init_test(cx);
6521
6522 let fs = FakeFs::new(cx.executor());
6523 fs.insert_tree(
6524 path!("/dir"),
6525 json!({
6526 "a.tsx": "a",
6527 }),
6528 )
6529 .await;
6530
6531 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6532
6533 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6534 language_registry.add(tsx_lang());
6535 let language_server_names = [
6536 "TypeScriptServer",
6537 "TailwindServer",
6538 "ESLintServer",
6539 "NoHoverCapabilitiesServer",
6540 ];
6541 let mut language_servers = [
6542 language_registry.register_fake_lsp(
6543 "tsx",
6544 FakeLspAdapter {
6545 name: language_server_names[0],
6546 capabilities: lsp::ServerCapabilities {
6547 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6548 ..lsp::ServerCapabilities::default()
6549 },
6550 ..FakeLspAdapter::default()
6551 },
6552 ),
6553 language_registry.register_fake_lsp(
6554 "tsx",
6555 FakeLspAdapter {
6556 name: language_server_names[1],
6557 capabilities: lsp::ServerCapabilities {
6558 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6559 ..lsp::ServerCapabilities::default()
6560 },
6561 ..FakeLspAdapter::default()
6562 },
6563 ),
6564 language_registry.register_fake_lsp(
6565 "tsx",
6566 FakeLspAdapter {
6567 name: language_server_names[2],
6568 capabilities: lsp::ServerCapabilities {
6569 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6570 ..lsp::ServerCapabilities::default()
6571 },
6572 ..FakeLspAdapter::default()
6573 },
6574 ),
6575 language_registry.register_fake_lsp(
6576 "tsx",
6577 FakeLspAdapter {
6578 name: language_server_names[3],
6579 capabilities: lsp::ServerCapabilities {
6580 hover_provider: None,
6581 ..lsp::ServerCapabilities::default()
6582 },
6583 ..FakeLspAdapter::default()
6584 },
6585 ),
6586 ];
6587
6588 let (buffer, _handle) = project
6589 .update(cx, |p, cx| {
6590 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6591 })
6592 .await
6593 .unwrap();
6594 cx.executor().run_until_parked();
6595
6596 let mut servers_with_hover_requests = HashMap::default();
6597 for i in 0..language_server_names.len() {
6598 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6599 panic!(
6600 "Failed to get language server #{i} with name {}",
6601 &language_server_names[i]
6602 )
6603 });
6604 let new_server_name = new_server.server.name();
6605 assert!(
6606 !servers_with_hover_requests.contains_key(&new_server_name),
6607 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6608 );
6609 match new_server_name.as_ref() {
6610 "TailwindServer" | "TypeScriptServer" => {
6611 servers_with_hover_requests.insert(
6612 new_server_name.clone(),
6613 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6614 move |_, _| {
6615 let name = new_server_name.clone();
6616 async move {
6617 Ok(Some(lsp::Hover {
6618 contents: lsp::HoverContents::Scalar(
6619 lsp::MarkedString::String(format!("{name} hover")),
6620 ),
6621 range: None,
6622 }))
6623 }
6624 },
6625 ),
6626 );
6627 }
6628 "ESLintServer" => {
6629 servers_with_hover_requests.insert(
6630 new_server_name,
6631 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6632 |_, _| async move { Ok(None) },
6633 ),
6634 );
6635 }
6636 "NoHoverCapabilitiesServer" => {
6637 let _never_handled = new_server
6638 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6639 panic!(
6640 "Should not call for hovers server with no corresponding capabilities"
6641 )
6642 });
6643 }
6644 unexpected => panic!("Unexpected server name: {unexpected}"),
6645 }
6646 }
6647
6648 let hover_task = project.update(cx, |project, cx| {
6649 project.hover(&buffer, Point::new(0, 0), cx)
6650 });
6651 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6652 |mut hover_request| async move {
6653 hover_request
6654 .next()
6655 .await
6656 .expect("All hover requests should have been triggered")
6657 },
6658 ))
6659 .await;
6660 assert_eq!(
6661 vec!["TailwindServer hover", "TypeScriptServer hover"],
6662 hover_task
6663 .await
6664 .into_iter()
6665 .flatten()
6666 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6667 .sorted()
6668 .collect::<Vec<_>>(),
6669 "Should receive hover responses from all related servers with hover capabilities"
6670 );
6671}
6672
6673#[gpui::test]
6674async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6675 init_test(cx);
6676
6677 let fs = FakeFs::new(cx.executor());
6678 fs.insert_tree(
6679 path!("/dir"),
6680 json!({
6681 "a.ts": "a",
6682 }),
6683 )
6684 .await;
6685
6686 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6687
6688 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6689 language_registry.add(typescript_lang());
6690 let mut fake_language_servers = language_registry.register_fake_lsp(
6691 "TypeScript",
6692 FakeLspAdapter {
6693 capabilities: lsp::ServerCapabilities {
6694 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6695 ..lsp::ServerCapabilities::default()
6696 },
6697 ..FakeLspAdapter::default()
6698 },
6699 );
6700
6701 let (buffer, _handle) = project
6702 .update(cx, |p, cx| {
6703 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6704 })
6705 .await
6706 .unwrap();
6707 cx.executor().run_until_parked();
6708
6709 let fake_server = fake_language_servers
6710 .next()
6711 .await
6712 .expect("failed to get the language server");
6713
6714 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6715 move |_, _| async move {
6716 Ok(Some(lsp::Hover {
6717 contents: lsp::HoverContents::Array(vec![
6718 lsp::MarkedString::String("".to_string()),
6719 lsp::MarkedString::String(" ".to_string()),
6720 lsp::MarkedString::String("\n\n\n".to_string()),
6721 ]),
6722 range: None,
6723 }))
6724 },
6725 );
6726
6727 let hover_task = project.update(cx, |project, cx| {
6728 project.hover(&buffer, Point::new(0, 0), cx)
6729 });
6730 let () = request_handled
6731 .next()
6732 .await
6733 .expect("All hover requests should have been triggered");
6734 assert_eq!(
6735 Vec::<String>::new(),
6736 hover_task
6737 .await
6738 .into_iter()
6739 .flatten()
6740 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6741 .sorted()
6742 .collect::<Vec<_>>(),
6743 "Empty hover parts should be ignored"
6744 );
6745}
6746
6747#[gpui::test]
6748async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6749 init_test(cx);
6750
6751 let fs = FakeFs::new(cx.executor());
6752 fs.insert_tree(
6753 path!("/dir"),
6754 json!({
6755 "a.ts": "a",
6756 }),
6757 )
6758 .await;
6759
6760 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6761
6762 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6763 language_registry.add(typescript_lang());
6764 let mut fake_language_servers = language_registry.register_fake_lsp(
6765 "TypeScript",
6766 FakeLspAdapter {
6767 capabilities: lsp::ServerCapabilities {
6768 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6769 ..lsp::ServerCapabilities::default()
6770 },
6771 ..FakeLspAdapter::default()
6772 },
6773 );
6774
6775 let (buffer, _handle) = project
6776 .update(cx, |p, cx| {
6777 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6778 })
6779 .await
6780 .unwrap();
6781 cx.executor().run_until_parked();
6782
6783 let fake_server = fake_language_servers
6784 .next()
6785 .await
6786 .expect("failed to get the language server");
6787
6788 let mut request_handled = fake_server
6789 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6790 Ok(Some(vec![
6791 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6792 title: "organize imports".to_string(),
6793 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6794 ..lsp::CodeAction::default()
6795 }),
6796 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6797 title: "fix code".to_string(),
6798 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6799 ..lsp::CodeAction::default()
6800 }),
6801 ]))
6802 });
6803
6804 let code_actions_task = project.update(cx, |project, cx| {
6805 project.code_actions(
6806 &buffer,
6807 0..buffer.read(cx).len(),
6808 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6809 cx,
6810 )
6811 });
6812
6813 let () = request_handled
6814 .next()
6815 .await
6816 .expect("The code action request should have been triggered");
6817
6818 let code_actions = code_actions_task.await.unwrap().unwrap();
6819 assert_eq!(code_actions.len(), 1);
6820 assert_eq!(
6821 code_actions[0].lsp_action.action_kind(),
6822 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6823 );
6824}
6825
6826#[gpui::test]
6827async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6828 init_test(cx);
6829
6830 let fs = FakeFs::new(cx.executor());
6831 fs.insert_tree(
6832 path!("/dir"),
6833 json!({
6834 "a.tsx": "a",
6835 }),
6836 )
6837 .await;
6838
6839 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6840
6841 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6842 language_registry.add(tsx_lang());
6843 let language_server_names = [
6844 "TypeScriptServer",
6845 "TailwindServer",
6846 "ESLintServer",
6847 "NoActionsCapabilitiesServer",
6848 ];
6849
6850 let mut language_server_rxs = [
6851 language_registry.register_fake_lsp(
6852 "tsx",
6853 FakeLspAdapter {
6854 name: language_server_names[0],
6855 capabilities: lsp::ServerCapabilities {
6856 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6857 ..lsp::ServerCapabilities::default()
6858 },
6859 ..FakeLspAdapter::default()
6860 },
6861 ),
6862 language_registry.register_fake_lsp(
6863 "tsx",
6864 FakeLspAdapter {
6865 name: language_server_names[1],
6866 capabilities: lsp::ServerCapabilities {
6867 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6868 ..lsp::ServerCapabilities::default()
6869 },
6870 ..FakeLspAdapter::default()
6871 },
6872 ),
6873 language_registry.register_fake_lsp(
6874 "tsx",
6875 FakeLspAdapter {
6876 name: language_server_names[2],
6877 capabilities: lsp::ServerCapabilities {
6878 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6879 ..lsp::ServerCapabilities::default()
6880 },
6881 ..FakeLspAdapter::default()
6882 },
6883 ),
6884 language_registry.register_fake_lsp(
6885 "tsx",
6886 FakeLspAdapter {
6887 name: language_server_names[3],
6888 capabilities: lsp::ServerCapabilities {
6889 code_action_provider: None,
6890 ..lsp::ServerCapabilities::default()
6891 },
6892 ..FakeLspAdapter::default()
6893 },
6894 ),
6895 ];
6896
6897 let (buffer, _handle) = project
6898 .update(cx, |p, cx| {
6899 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6900 })
6901 .await
6902 .unwrap();
6903 cx.executor().run_until_parked();
6904
6905 let mut servers_with_actions_requests = HashMap::default();
6906 for i in 0..language_server_names.len() {
6907 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6908 panic!(
6909 "Failed to get language server #{i} with name {}",
6910 &language_server_names[i]
6911 )
6912 });
6913 let new_server_name = new_server.server.name();
6914
6915 assert!(
6916 !servers_with_actions_requests.contains_key(&new_server_name),
6917 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6918 );
6919 match new_server_name.0.as_ref() {
6920 "TailwindServer" | "TypeScriptServer" => {
6921 servers_with_actions_requests.insert(
6922 new_server_name.clone(),
6923 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6924 move |_, _| {
6925 let name = new_server_name.clone();
6926 async move {
6927 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6928 lsp::CodeAction {
6929 title: format!("{name} code action"),
6930 ..lsp::CodeAction::default()
6931 },
6932 )]))
6933 }
6934 },
6935 ),
6936 );
6937 }
6938 "ESLintServer" => {
6939 servers_with_actions_requests.insert(
6940 new_server_name,
6941 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6942 |_, _| async move { Ok(None) },
6943 ),
6944 );
6945 }
6946 "NoActionsCapabilitiesServer" => {
6947 let _never_handled = new_server
6948 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6949 panic!(
6950 "Should not call for code actions server with no corresponding capabilities"
6951 )
6952 });
6953 }
6954 unexpected => panic!("Unexpected server name: {unexpected}"),
6955 }
6956 }
6957
6958 let code_actions_task = project.update(cx, |project, cx| {
6959 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6960 });
6961
6962 // cx.run_until_parked();
6963 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6964 |mut code_actions_request| async move {
6965 code_actions_request
6966 .next()
6967 .await
6968 .expect("All code actions requests should have been triggered")
6969 },
6970 ))
6971 .await;
6972 assert_eq!(
6973 vec!["TailwindServer code action", "TypeScriptServer code action"],
6974 code_actions_task
6975 .await
6976 .unwrap()
6977 .unwrap()
6978 .into_iter()
6979 .map(|code_action| code_action.lsp_action.title().to_owned())
6980 .sorted()
6981 .collect::<Vec<_>>(),
6982 "Should receive code actions responses from all related servers with hover capabilities"
6983 );
6984}
6985
6986#[gpui::test]
6987async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6988 init_test(cx);
6989
6990 let fs = FakeFs::new(cx.executor());
6991 fs.insert_tree(
6992 "/dir",
6993 json!({
6994 "a.rs": "let a = 1;",
6995 "b.rs": "let b = 2;",
6996 "c.rs": "let c = 2;",
6997 }),
6998 )
6999 .await;
7000
7001 let project = Project::test(
7002 fs,
7003 [
7004 "/dir/a.rs".as_ref(),
7005 "/dir/b.rs".as_ref(),
7006 "/dir/c.rs".as_ref(),
7007 ],
7008 cx,
7009 )
7010 .await;
7011
7012 // check the initial state and get the worktrees
7013 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7014 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7015 assert_eq!(worktrees.len(), 3);
7016
7017 let worktree_a = worktrees[0].read(cx);
7018 let worktree_b = worktrees[1].read(cx);
7019 let worktree_c = worktrees[2].read(cx);
7020
7021 // check they start in the right order
7022 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7023 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7024 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7025
7026 (
7027 worktrees[0].clone(),
7028 worktrees[1].clone(),
7029 worktrees[2].clone(),
7030 )
7031 });
7032
7033 // move first worktree to after the second
7034 // [a, b, c] -> [b, a, c]
7035 project
7036 .update(cx, |project, cx| {
7037 let first = worktree_a.read(cx);
7038 let second = worktree_b.read(cx);
7039 project.move_worktree(first.id(), second.id(), cx)
7040 })
7041 .expect("moving first after second");
7042
7043 // check the state after moving
7044 project.update(cx, |project, cx| {
7045 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7046 assert_eq!(worktrees.len(), 3);
7047
7048 let first = worktrees[0].read(cx);
7049 let second = worktrees[1].read(cx);
7050 let third = worktrees[2].read(cx);
7051
7052 // check they are now in the right order
7053 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7054 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7055 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7056 });
7057
7058 // move the second worktree to before the first
7059 // [b, a, c] -> [a, b, c]
7060 project
7061 .update(cx, |project, cx| {
7062 let second = worktree_a.read(cx);
7063 let first = worktree_b.read(cx);
7064 project.move_worktree(first.id(), second.id(), cx)
7065 })
7066 .expect("moving second before first");
7067
7068 // check the state after moving
7069 project.update(cx, |project, cx| {
7070 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7071 assert_eq!(worktrees.len(), 3);
7072
7073 let first = worktrees[0].read(cx);
7074 let second = worktrees[1].read(cx);
7075 let third = worktrees[2].read(cx);
7076
7077 // check they are now in the right order
7078 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7079 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7080 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7081 });
7082
7083 // move the second worktree to after the third
7084 // [a, b, c] -> [a, c, b]
7085 project
7086 .update(cx, |project, cx| {
7087 let second = worktree_b.read(cx);
7088 let third = worktree_c.read(cx);
7089 project.move_worktree(second.id(), third.id(), cx)
7090 })
7091 .expect("moving second after third");
7092
7093 // check the state after moving
7094 project.update(cx, |project, cx| {
7095 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7096 assert_eq!(worktrees.len(), 3);
7097
7098 let first = worktrees[0].read(cx);
7099 let second = worktrees[1].read(cx);
7100 let third = worktrees[2].read(cx);
7101
7102 // check they are now in the right order
7103 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7104 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7105 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7106 });
7107
7108 // move the third worktree to before the second
7109 // [a, c, b] -> [a, b, c]
7110 project
7111 .update(cx, |project, cx| {
7112 let third = worktree_c.read(cx);
7113 let second = worktree_b.read(cx);
7114 project.move_worktree(third.id(), second.id(), cx)
7115 })
7116 .expect("moving third before second");
7117
7118 // check the state after moving
7119 project.update(cx, |project, cx| {
7120 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7121 assert_eq!(worktrees.len(), 3);
7122
7123 let first = worktrees[0].read(cx);
7124 let second = worktrees[1].read(cx);
7125 let third = worktrees[2].read(cx);
7126
7127 // check they are now in the right order
7128 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7129 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7130 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7131 });
7132
7133 // move the first worktree to after the third
7134 // [a, b, c] -> [b, c, a]
7135 project
7136 .update(cx, |project, cx| {
7137 let first = worktree_a.read(cx);
7138 let third = worktree_c.read(cx);
7139 project.move_worktree(first.id(), third.id(), cx)
7140 })
7141 .expect("moving first after third");
7142
7143 // check the state after moving
7144 project.update(cx, |project, cx| {
7145 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7146 assert_eq!(worktrees.len(), 3);
7147
7148 let first = worktrees[0].read(cx);
7149 let second = worktrees[1].read(cx);
7150 let third = worktrees[2].read(cx);
7151
7152 // check they are now in the right order
7153 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7154 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7155 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7156 });
7157
7158 // move the third worktree to before the first
7159 // [b, c, a] -> [a, b, c]
7160 project
7161 .update(cx, |project, cx| {
7162 let third = worktree_a.read(cx);
7163 let first = worktree_b.read(cx);
7164 project.move_worktree(third.id(), first.id(), cx)
7165 })
7166 .expect("moving third before first");
7167
7168 // check the state after moving
7169 project.update(cx, |project, cx| {
7170 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7171 assert_eq!(worktrees.len(), 3);
7172
7173 let first = worktrees[0].read(cx);
7174 let second = worktrees[1].read(cx);
7175 let third = worktrees[2].read(cx);
7176
7177 // check they are now in the right order
7178 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7179 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7180 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7181 });
7182}
7183
// Verifies that an unstaged diff (working copy vs. Git index) for a buffer
// is computed against the staged text, and is recomputed when the index
// contents change.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index contents differ from the working copy: the working copy adds a
    // comment line and changes the println message.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed the fake repository's index with the staged contents.
    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initial diff against the index: one added line and one modified line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Replace the index contents with a version that already contains the
    // comment but not the println line.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // The diff recomputes against the new index text: only the println line
    // remains, as an addition.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7277
// Verifies that an uncommitted diff (working copy vs. HEAD) tracks HEAD and
// index changes: hunk ranges are computed against the committed text, and
// each hunk's secondary status reflects whether it is staged in the index.
// Also covers a file that exists in HEAD/index but is deleted on disk.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // Seed HEAD and the index. `deletion.rs` exists in both but is absent
    // from the working tree.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line is added but unstaged (it only exists in the working
    // copy); the println change is staged, so it has no secondary hunk.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion hunk; it still exists in the index,
    // so the deletion is not yet staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is removed from the index, the deletion hunk no longer
    // has a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7461
// Verifies staging and unstaging of individual hunks through an uncommitted
// diff: hunks optimistically enter a pending state, events are emitted for
// each change, a failed index write rolls the hunk back to unstaged, and
// multiple staging operations can be in flight at once.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so all hunks begin unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so we can assert on what it emits.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7807
// Verifies that staging still converges when filesystem events for index
// writes are delayed: hunks staged while earlier FS events are still
// buffered must all end up staged once the events are flushed.
// NOTE(review): the seeds appear pinned to reproduce a specific
// interleaving — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so all hunks begin unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
8001
// Randomized test: repeatedly stages/unstages random hunks with random
// yields between operations, maintains a model of each hunk's expected
// secondary status, and checks the diff converges to the model once all
// pending index writes settle.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    _executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; override via the OPERATIONS env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines, with every 5th line modified in the buffer relative to the
    // committed/index text.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model: its secondary_status fields are mutated
    // below to track what each hunk's state should eventually be.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    // One modification hunk per modified line (lines 0, 5, 10, 15, 20, 25).
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the chosen hunk and record the expected pending state.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly interleave the operations with executor progress.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // All pending states should have resolved by now; update the model to
    // its settled form.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    // The real diff must match the model, hunk by hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8121
8122#[gpui::test]
8123async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8124 init_test(cx);
8125
8126 let committed_contents = r#"
8127 fn main() {
8128 println!("hello from HEAD");
8129 }
8130 "#
8131 .unindent();
8132 let file_contents = r#"
8133 fn main() {
8134 println!("hello from the working copy");
8135 }
8136 "#
8137 .unindent();
8138
8139 let fs = FakeFs::new(cx.background_executor.clone());
8140 fs.insert_tree(
8141 "/dir",
8142 json!({
8143 ".git": {},
8144 "src": {
8145 "main.rs": file_contents,
8146 }
8147 }),
8148 )
8149 .await;
8150
8151 fs.set_head_for_repo(
8152 Path::new("/dir/.git"),
8153 &[("src/main.rs", committed_contents.clone())],
8154 "deadbeef",
8155 );
8156 fs.set_index_for_repo(
8157 Path::new("/dir/.git"),
8158 &[("src/main.rs", committed_contents.clone())],
8159 );
8160
8161 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8162
8163 let buffer = project
8164 .update(cx, |project, cx| {
8165 project.open_local_buffer("/dir/src/main.rs", cx)
8166 })
8167 .await
8168 .unwrap();
8169 let uncommitted_diff = project
8170 .update(cx, |project, cx| {
8171 project.open_uncommitted_diff(buffer.clone(), cx)
8172 })
8173 .await
8174 .unwrap();
8175
8176 cx.run_until_parked();
8177 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8178 let snapshot = buffer.read(cx).snapshot();
8179 assert_hunks(
8180 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8181 &snapshot,
8182 &uncommitted_diff.base_text_string(cx).unwrap(),
8183 &[(
8184 1..2,
8185 " println!(\"hello from HEAD\");\n",
8186 " println!(\"hello from the working copy\");\n",
8187 DiffHunkStatus {
8188 kind: DiffHunkStatusKind::Modified,
8189 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8190 },
8191 )],
8192 );
8193 });
8194}
8195
// Verifies that staging a hunk through the diff machinery does not clobber the
// file's executable bit in the git index: the staged entry must stay 100755.
// TODO: Should we test this on Windows also?
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Uses a real on-disk git repository, so blocking FS access is expected.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    // Mark the file executable *before* the initial commit so the committed
    // index entry carries mode 100755.
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    // Modify the working copy so there is a hunk to stage.
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the uncommitted diff.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // If staging dropped the executable bit, `git diff --staged` would report
    // a "new mode 100644" line.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check against the index itself: `git ls-files -s` prints each
    // entry's mode bits.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8280
// Checks the mapping from project paths to (owning repository, repo-relative
// path), including a repository nested inside another, and that the mapping
// is invalidated when a repository's `.git` directory is deleted.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repository that contains another repository at
    // `dir1/deps/dep1`; `c.txt` lies outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (worktree-relative path, expected (repo work dir, repo-relative path));
        // `None` means the path belongs to no repository. Note that a file in
        // the nested repo maps to the *innermost* repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing dir1's `.git` should leave `dir1/src/b.txt` repo-less (the
    // nested dep1 repository does not contain it).
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8370
8371#[gpui::test]
8372async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8373 init_test(cx);
8374 let fs = FakeFs::new(cx.background_executor.clone());
8375 let home = paths::home_dir();
8376 fs.insert_tree(
8377 home,
8378 json!({
8379 ".git": {},
8380 "project": {
8381 "a.txt": "A"
8382 },
8383 }),
8384 )
8385 .await;
8386
8387 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8388 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8389 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8390
8391 project
8392 .update(cx, |project, cx| project.git_scans_complete(cx))
8393 .await;
8394 tree.flush_fs_events(cx).await;
8395
8396 project.read_with(cx, |project, cx| {
8397 let containing = project
8398 .git_store()
8399 .read(cx)
8400 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8401 assert!(containing.is_none());
8402 });
8403
8404 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8405 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8406 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8407 project
8408 .update(cx, |project, cx| project.git_scans_complete(cx))
8409 .await;
8410 tree.flush_fs_events(cx).await;
8411
8412 project.read_with(cx, |project, cx| {
8413 let containing = project
8414 .git_store()
8415 .read(cx)
8416 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8417 assert_eq!(
8418 containing
8419 .unwrap()
8420 .0
8421 .read(cx)
8422 .work_directory_abs_path
8423 .as_ref(),
8424 home,
8425 );
8426 });
8427}
8428
// Exercises cached git statuses over a real repository: initial modified /
// untracked / deleted states, updates after edits, and the effect of
// committing and deleting tracked vs. untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git on a real temp dir requires blocking FS access.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the states annotated above: delete d.txt, modify a.txt
    // (b.txt was never added, so it stays untracked).
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged and therefore absent from the status list.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modifying the previously-unchanged file should add it to the statuses.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the current state (a.txt, c.txt modified; d.txt removed from
    // the index), leaving only b.txt untracked.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file and one untracked file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8558
// Verifies two pieces of status post-processing: a file that is deleted in
// the index but present in HEAD and the working copy shows a combined `DA`
// status, and a nested repository's work dir is excluded from the outer
// repository's statuses. Currently `#[ignore]`d.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git on disk requires blocking FS access.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer ("project") repository, not the nested one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8622
8623#[track_caller]
8624/// We merge lhs into rhs.
8625fn merge_pending_ops_snapshots(
8626 source: Vec<pending_op::PendingOps>,
8627 mut target: Vec<pending_op::PendingOps>,
8628) -> Vec<pending_op::PendingOps> {
8629 for s_ops in source {
8630 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8631 if ops.repo_path == s_ops.repo_path {
8632 Some(idx)
8633 } else {
8634 None
8635 }
8636 }) {
8637 let t_ops = &mut target[idx];
8638 for s_op in s_ops.ops {
8639 if let Some(op_idx) = t_ops
8640 .ops
8641 .iter()
8642 .zip(0..)
8643 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8644 {
8645 let t_op = &mut t_ops.ops[op_idx];
8646 match (s_op.job_status, t_op.job_status) {
8647 (pending_op::JobStatus::Running, _) => {}
8648 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8649 (s_st, t_st) if s_st == t_st => {}
8650 _ => unreachable!(),
8651 }
8652 } else {
8653 t_ops.ops.push(s_op);
8654 }
8655 }
8656 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8657 } else {
8658 target.push(s_ops);
8659 }
8660 }
8661 target
8662}
8663
// Exercises pending-op bookkeeping across repeated stage/unstage requests for
// a single entry: each request gets a fresh, monotonically increasing op id,
// starts `Running`, and settles to `Finished` once the git job completes.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` event into one merged snapshot so
    // the complete op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids are handed out sequentially starting at 1.
    let mut id = 1u16;

    // Stage (or unstage) `path` and assert that the newest pending op carries
    // the expected id and transitions Running -> Finished.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // Immediately after submission the op must be present and Running.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // Once the task resolves, the same op must be Finished.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate stage/unstage, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history records all five ops, in order, all Finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final cached status reflects the last operation: staged (added).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8824
// When two identical stage requests are issued back to back, the first is
// superseded: it should be recorded as `Skipped` while the second one runs
// to completion as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` event into one merged snapshot so
    // the complete op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached, never awaited — it will be superseded.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second identical request; await it (with a timeout as a safety net).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was skipped in favor of op 2, which finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged (added) exactly once.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8930
// Exercises pending ops for bulk operations: `stage_all` must not re-stage an
// entry that was already staged individually (a.txt gets no extra op for it),
// and `unstage_all` records one op per affected entry.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every `PendingOpsChanged` event into one merged snapshot so
    // the complete op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt explicitly, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: the explicit stage (id 1) plus the unstage-all (id 2) — note
    // stage_all didn't add an op since a.txt was already staged.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by stage_all (id 1) and unstaged by unstage_all (id 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9059
// Opens a project on a subfolder of a repository and checks that the
// repository is still discovered (with its true work dir), that statuses for
// paths inside the subfolder are visible, and that status updates propagate.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files under the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    // Only e.txt has a status initially.
    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open the project two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The work dir is the repository root, not the opened subfolder.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the repo's statuses should clear them here too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9139
// Simulates a conflicted cherry-pick with real git and checks that the
// repository reports the conflicted path in `merge_conflicts`, then that the
// conflict clears after the pick is resolved and committed.
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())] // compiled out until the flakiness is resolved
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git on disk requires blocking FS access.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // On a side branch, change a.txt to "A" and commit; back on main, change
    // it to "b" and commit. Cherry-picking the side commit must conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick and conflicted.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should surface the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9222
// Verifies that rewriting .gitignore updates per-entry git state: a formerly
// ignored file becomes trackable (and shows as Added once staged) while a
// formerly tracked file becomes ignored.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree: .gitignore and a.xml are committed; b.txt is
    // ignored by the initial "*.txt" pattern.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignore states have swapped, and b.txt now shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9290
// NOTE:
// This test always fails on Windows because, unlike on Unix, Windows does not
// allow renaming a directory that some program already has open.
// This is a limitation of Windows itself.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Verifies that when a repository's work directory is renamed on disk, the
// repository handle follows the new path and per-file statuses are preserved.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem test (TempTree + RealFs): allow the executor to park.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, leave `b` untracked, then modify `a` in the working copy.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Sanity-check the initial work directory and statuses before the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should now report the renamed work directory, with the
    // same statuses as before.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9372
// NOTE: This test always fails on Windows because, unlike on Unix, Windows
// does not allow renaming a directory that some program already has open.
// This is a limitation of Windows itself.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// End-to-end check that worktree git statuses track real repository
// operations: modify, add, commit, reset, stash, ignore-rule changes, file
// removal, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem test: allow the executor to park while waiting on IO.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the assertions below.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // `b.txt` and `f.txt` were never added, so they start out untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files have no status entry at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    // `target/` is ignored, so this write should not surface a status.
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules to also cover `f.txt`.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Create a nested directory with a new untracked file.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the file
    // to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
9597
// Verifies which repository and project events fire when only the contents of
// an ignored directory (`target/`) change on a real filesystem: the initial
// scan emits events, but later churn inside ignored subdirectories must not
// produce repository updates.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem test: allow the executor to park while waiting on IO.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository updates and worktree entry updates so we can assert
    // on exactly which events were emitted.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel path is test plumbing, not a real entry.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside `target/` forces the ignored subtree to be scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build-tool churn inside the ignored directory: create a nested
    // dir, write a temp file into it, then remove it all again.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
9759
9760// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
9761// to different timings/ordering of events.
9762#[ignore]
9763#[gpui::test]
9764async fn test_odd_events_for_ignored_dirs(
9765 executor: BackgroundExecutor,
9766 cx: &mut gpui::TestAppContext,
9767) {
9768 init_test(cx);
9769 let fs = FakeFs::new(executor);
9770 fs.insert_tree(
9771 path!("/root"),
9772 json!({
9773 ".git": {},
9774 ".gitignore": "**/target/",
9775 "src": {
9776 "main.rs": "fn main() {}",
9777 },
9778 "target": {
9779 "debug": {
9780 "foo.txt": "foo",
9781 "deps": {}
9782 }
9783 }
9784 }),
9785 )
9786 .await;
9787 fs.set_head_and_index_for_repo(
9788 path!("/root/.git").as_ref(),
9789 &[
9790 (".gitignore", "**/target/".into()),
9791 ("src/main.rs", "fn main() {}".into()),
9792 ],
9793 );
9794
9795 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9796 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9797 let project_events = Arc::new(Mutex::new(Vec::new()));
9798 project.update(cx, |project, cx| {
9799 let repository_updates = repository_updates.clone();
9800 cx.subscribe(project.git_store(), move |_, _, e, _| {
9801 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9802 repository_updates.lock().push(e.clone());
9803 }
9804 })
9805 .detach();
9806 let project_events = project_events.clone();
9807 cx.subscribe_self(move |_, e, _| {
9808 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9809 project_events.lock().extend(
9810 updates
9811 .iter()
9812 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9813 .filter(|(path, _)| path != "fs-event-sentinel"),
9814 );
9815 }
9816 })
9817 .detach();
9818 });
9819
9820 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9821 tree.update(cx, |tree, cx| {
9822 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9823 })
9824 .await
9825 .unwrap();
9826 tree.flush_fs_events(cx).await;
9827 project
9828 .update(cx, |project, cx| project.git_scans_complete(cx))
9829 .await;
9830 cx.run_until_parked();
9831 tree.update(cx, |tree, _| {
9832 assert_eq!(
9833 tree.entries(true, 0)
9834 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9835 .collect::<Vec<_>>(),
9836 vec![
9837 (rel_path(""), false),
9838 (rel_path(".gitignore"), false),
9839 (rel_path("src"), false),
9840 (rel_path("src/main.rs"), false),
9841 (rel_path("target"), true),
9842 (rel_path("target/debug"), true),
9843 (rel_path("target/debug/deps"), true),
9844 (rel_path("target/debug/foo.txt"), true),
9845 ]
9846 );
9847 });
9848
9849 assert_eq!(
9850 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9851 vec![
9852 RepositoryEvent::MergeHeadsChanged,
9853 RepositoryEvent::BranchChanged,
9854 RepositoryEvent::StatusesChanged,
9855 RepositoryEvent::StatusesChanged,
9856 ],
9857 "Initial worktree scan should produce a repo update event"
9858 );
9859 assert_eq!(
9860 project_events.lock().drain(..).collect::<Vec<_>>(),
9861 vec![
9862 ("target".to_string(), PathChange::Loaded),
9863 ("target/debug".to_string(), PathChange::Loaded),
9864 ("target/debug/deps".to_string(), PathChange::Loaded),
9865 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9866 ],
9867 "All non-ignored entries and all opened firs should be getting a project event",
9868 );
9869
9870 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9871 // This may happen multiple times during a single flycheck, but once is enough for testing.
9872 fs.emit_fs_event("/root/target/debug/deps", None);
9873 tree.flush_fs_events(cx).await;
9874 project
9875 .update(cx, |project, cx| project.git_scans_complete(cx))
9876 .await;
9877 cx.executor().run_until_parked();
9878
9879 assert_eq!(
9880 repository_updates
9881 .lock()
9882 .iter()
9883 .cloned()
9884 .collect::<Vec<_>>(),
9885 Vec::new(),
9886 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9887 );
9888 assert_eq!(
9889 project_events.lock().as_slice(),
9890 Vec::new(),
9891 "No further project events should happen, as only ignored dirs received FS events",
9892 );
9893}
9894
9895#[gpui::test]
9896async fn test_repos_in_invisible_worktrees(
9897 executor: BackgroundExecutor,
9898 cx: &mut gpui::TestAppContext,
9899) {
9900 init_test(cx);
9901 let fs = FakeFs::new(executor);
9902 fs.insert_tree(
9903 path!("/root"),
9904 json!({
9905 "dir1": {
9906 ".git": {},
9907 "dep1": {
9908 ".git": {},
9909 "src": {
9910 "a.txt": "",
9911 },
9912 },
9913 "b.txt": "",
9914 },
9915 }),
9916 )
9917 .await;
9918
9919 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
9920 let _visible_worktree =
9921 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9922 project
9923 .update(cx, |project, cx| project.git_scans_complete(cx))
9924 .await;
9925
9926 let repos = project.read_with(cx, |project, cx| {
9927 project
9928 .repositories(cx)
9929 .values()
9930 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9931 .collect::<Vec<_>>()
9932 });
9933 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9934
9935 let (_invisible_worktree, _) = project
9936 .update(cx, |project, cx| {
9937 project.worktree_store.update(cx, |worktree_store, cx| {
9938 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
9939 })
9940 })
9941 .await
9942 .expect("failed to create worktree");
9943 project
9944 .update(cx, |project, cx| project.git_scans_complete(cx))
9945 .await;
9946
9947 let repos = project.read_with(cx, |project, cx| {
9948 project
9949 .repositories(cx)
9950 .values()
9951 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9952 .collect::<Vec<_>>()
9953 });
9954 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9955}
9956
// Verifies ignore handling across rescans: files ignored by an ancestor
// .gitignore outside the repository, files ignored by the repository's own
// .gitignore, and entries created after the initial scan.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so that `.git` and ignored directories are
    // present in the worktree and can be asserted on below.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer `.gitignore` lives above the repository root (`tree/`), so it
    // acts as an ancestor ignore file for the repo.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded so their state can
    // be asserted below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each category: a staged tracked file, a file
    // matching the ancestor .gitignore, and a file inside the ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // In the index but not HEAD, and not ignored: shows as Added.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10097
// Verifies that linked git worktrees (`.git` file pointing into
// `.git/worktrees/...`) and submodules (`.git` file pointing into
// `.git/modules/...`) are each discovered as separate repositories, and that
// git state changes in them are picked up for open buffers.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories (main, linked worktree, submodule) should be
    // discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index both contain "b", while the file on disk is "B",
            // so the file should read as modified in the worktree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // Resolve the buffer to its repository and wait on a barrier so the
    // repository has processed pending updates before we assert.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10253
10254#[gpui::test]
10255async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
10256 init_test(cx);
10257 let fs = FakeFs::new(cx.background_executor.clone());
10258 fs.insert_tree(
10259 path!("/root"),
10260 json!({
10261 "project": {
10262 ".git": {},
10263 "child1": {
10264 "a.txt": "A",
10265 },
10266 "child2": {
10267 "b.txt": "B",
10268 }
10269 }
10270 }),
10271 )
10272 .await;
10273
10274 let project = Project::test(
10275 fs.clone(),
10276 [
10277 path!("/root/project/child1").as_ref(),
10278 path!("/root/project/child2").as_ref(),
10279 ],
10280 cx,
10281 )
10282 .await;
10283
10284 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10285 tree.flush_fs_events(cx).await;
10286 project
10287 .update(cx, |project, cx| project.git_scans_complete(cx))
10288 .await;
10289 cx.executor().run_until_parked();
10290
10291 let repos = project.read_with(cx, |project, cx| {
10292 project
10293 .repositories(cx)
10294 .values()
10295 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
10296 .collect::<Vec<_>>()
10297 });
10298 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
10299}
10300
// Verifies that saving a buffer under a new path (save-as) updates its diff
// bases: the unstaged diff should switch to the new file's index contents and
// the uncommitted diff to the new file's HEAD contents.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct committed/staged contents for each file so we can tell which
    // base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so the save-as below writes new contents.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        // The buffer text ("buffer") differs from file_2's staged contents,
        // so at least one hunk must be present.
        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // The uncommitted diff compares against HEAD rather than the index.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10414
10415async fn search(
10416 project: &Entity<Project>,
10417 query: SearchQuery,
10418 cx: &mut gpui::TestAppContext,
10419) -> Result<HashMap<String, Vec<Range<usize>>>> {
10420 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10421 let mut results = HashMap::default();
10422 while let Ok(search_result) = search_rx.rx.recv().await {
10423 match search_result {
10424 SearchResult::Buffer { buffer, ranges } => {
10425 results.entry(buffer).or_insert(ranges);
10426 }
10427 SearchResult::LimitReached => {}
10428 }
10429 }
10430 Ok(results
10431 .into_iter()
10432 .map(|(buffer, ranges)| {
10433 buffer.update(cx, |buffer, cx| {
10434 let path = buffer
10435 .file()
10436 .unwrap()
10437 .full_path(cx)
10438 .to_string_lossy()
10439 .to_string();
10440 let ranges = ranges
10441 .into_iter()
10442 .map(|range| range.to_offset(buffer))
10443 .collect::<Vec<_>>();
10444 (path, ranges)
10445 })
10446 })
10447 .collect())
10448}
10449
10450pub fn init_test(cx: &mut gpui::TestAppContext) {
10451 zlog::init_test();
10452
10453 cx.update(|cx| {
10454 let settings_store = SettingsStore::test(cx);
10455 cx.set_global(settings_store);
10456 release_channel::init(semver::Version::new(0, 0, 0), cx);
10457 });
10458}
10459
10460fn json_lang() -> Arc<Language> {
10461 Arc::new(Language::new(
10462 LanguageConfig {
10463 name: "JSON".into(),
10464 matcher: LanguageMatcher {
10465 path_suffixes: vec!["json".to_string()],
10466 ..Default::default()
10467 },
10468 ..Default::default()
10469 },
10470 None,
10471 ))
10472}
10473
10474fn js_lang() -> Arc<Language> {
10475 Arc::new(Language::new(
10476 LanguageConfig {
10477 name: "JavaScript".into(),
10478 matcher: LanguageMatcher {
10479 path_suffixes: vec!["js".to_string()],
10480 ..Default::default()
10481 },
10482 ..Default::default()
10483 },
10484 None,
10485 ))
10486}
10487
/// Builds a fake Python language whose toolchain lister discovers `.venv`
/// directories on the provided fake filesystem, so toolchain tests need no
/// real Python interpreter.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report one toolchain for every `.venv` directory found in the
            // subroot path or any of its ancestor directories.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is not exercised by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // The fake environment needs no shell activation commands.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
10557
10558fn typescript_lang() -> Arc<Language> {
10559 Arc::new(Language::new(
10560 LanguageConfig {
10561 name: "TypeScript".into(),
10562 matcher: LanguageMatcher {
10563 path_suffixes: vec!["ts".to_string()],
10564 ..Default::default()
10565 },
10566 ..Default::default()
10567 },
10568 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10569 ))
10570}
10571
10572fn tsx_lang() -> Arc<Language> {
10573 Arc::new(Language::new(
10574 LanguageConfig {
10575 name: "tsx".into(),
10576 matcher: LanguageMatcher {
10577 path_suffixes: vec!["tsx".to_string()],
10578 ..Default::default()
10579 },
10580 ..Default::default()
10581 },
10582 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10583 ))
10584}
10585
10586fn get_all_tasks(
10587 project: &Entity<Project>,
10588 task_contexts: Arc<TaskContexts>,
10589 cx: &mut App,
10590) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10591 let new_tasks = project.update(cx, |project, cx| {
10592 project.task_store.update(cx, |task_store, cx| {
10593 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10594 this.used_and_current_resolved_tasks(task_contexts, cx)
10595 })
10596 })
10597 });
10598
10599 cx.background_spawn(async move {
10600 let (mut old, new) = new_tasks.await;
10601 old.extend(new);
10602 old
10603 })
10604}
10605
10606#[track_caller]
10607fn assert_entry_git_state(
10608 tree: &Worktree,
10609 repository: &Repository,
10610 path: &str,
10611 index_status: Option<StatusCode>,
10612 is_ignored: bool,
10613) {
10614 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10615 let entry = tree
10616 .entry_for_path(&rel_path(path))
10617 .unwrap_or_else(|| panic!("entry {path} not found"));
10618 let status = repository
10619 .status_for_path(&repo_path(path))
10620 .map(|entry| entry.status);
10621 let expected = index_status.map(|index_status| {
10622 TrackedStatus {
10623 index_status,
10624 worktree_status: StatusCode::Unmodified,
10625 }
10626 .into()
10627 });
10628 assert_eq!(
10629 status, expected,
10630 "expected {path} to have git status: {expected:?}"
10631 );
10632 assert_eq!(
10633 entry.is_ignored, is_ignored,
10634 "expected {path} to have is_ignored: {is_ignored}"
10635 );
10636}
10637
10638#[track_caller]
10639fn git_init(path: &Path) -> git2::Repository {
10640 let mut init_opts = RepositoryInitOptions::new();
10641 init_opts.initial_head("main");
10642 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10643}
10644
10645#[track_caller]
10646fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10647 let path = path.as_ref();
10648 let mut index = repo.index().expect("Failed to get index");
10649 index.add_path(path).expect("Failed to add file");
10650 index.write().expect("Failed to write index");
10651}
10652
10653#[track_caller]
10654fn git_remove_index(path: &Path, repo: &git2::Repository) {
10655 let mut index = repo.index().expect("Failed to get index");
10656 index.remove_path(path).expect("Failed to add file");
10657 index.write().expect("Failed to write index");
10658}
10659
10660#[track_caller]
10661fn git_commit(msg: &'static str, repo: &git2::Repository) {
10662 use git2::Signature;
10663
10664 let signature = Signature::now("test", "test@zed.dev").unwrap();
10665 let oid = repo.index().unwrap().write_tree().unwrap();
10666 let tree = repo.find_tree(oid).unwrap();
10667 if let Ok(head) = repo.head() {
10668 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10669
10670 let parent_commit = parent_obj.as_commit().unwrap();
10671
10672 repo.commit(
10673 Some("HEAD"),
10674 &signature,
10675 &signature,
10676 msg,
10677 &tree,
10678 &[parent_commit],
10679 )
10680 .expect("Failed to commit with parent");
10681 } else {
10682 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10683 .expect("Failed to commit");
10684 }
10685}
10686
// Compiled out (`cfg(any())` is never true); retained for future git tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    // Apply `commit` onto the current HEAD with default cherry-pick options.
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
10692
10693#[track_caller]
10694fn git_stash(repo: &mut git2::Repository) {
10695 use git2::Signature;
10696
10697 let signature = Signature::now("test", "test@zed.dev").unwrap();
10698 repo.stash_save(&signature, "N/A", None)
10699 .expect("Failed to stash");
10700}
10701
10702#[track_caller]
10703fn git_reset(offset: usize, repo: &git2::Repository) {
10704 let head = repo.head().expect("Couldn't get repo head");
10705 let object = head.peel(git2::ObjectType::Commit).unwrap();
10706 let commit = object.as_commit().unwrap();
10707 let new_head = commit
10708 .parents()
10709 .inspect(|parnet| {
10710 parnet.message();
10711 })
10712 .nth(offset)
10713 .expect("Not enough history");
10714 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10715 .expect("Could not reset");
10716}
10717
// Compiled out (`cfg(any())` is never true); retained for future git tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` pointing at the current HEAD commit, without
    // overwriting an existing branch of the same name.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously said "Failed to commit",
    // copy-pasted from `git_commit`.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10728
// Compiled out (`cfg(any())` is never true); retained for future git tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    // Point HEAD at the given refname, then update the working tree to match.
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10735
// Compiled out (`cfg(any())` is never true); retained for future git tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    // Snapshot every entry's path -> status flags.
    let statuses = repo.statuses(None).unwrap();
    let mut result = collections::HashMap::default();
    for entry in statuses.iter() {
        result.insert(entry.path().unwrap().to_string(), entry.status());
    }
    result
}
10745
10746#[gpui::test]
10747async fn test_find_project_path_abs(
10748 background_executor: BackgroundExecutor,
10749 cx: &mut gpui::TestAppContext,
10750) {
10751 // find_project_path should work with absolute paths
10752 init_test(cx);
10753
10754 let fs = FakeFs::new(background_executor);
10755 fs.insert_tree(
10756 path!("/root"),
10757 json!({
10758 "project1": {
10759 "file1.txt": "content1",
10760 "subdir": {
10761 "file2.txt": "content2"
10762 }
10763 },
10764 "project2": {
10765 "file3.txt": "content3"
10766 }
10767 }),
10768 )
10769 .await;
10770
10771 let project = Project::test(
10772 fs.clone(),
10773 [
10774 path!("/root/project1").as_ref(),
10775 path!("/root/project2").as_ref(),
10776 ],
10777 cx,
10778 )
10779 .await;
10780
10781 // Make sure the worktrees are fully initialized
10782 project
10783 .update(cx, |project, cx| project.git_scans_complete(cx))
10784 .await;
10785 cx.run_until_parked();
10786
10787 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
10788 project.read_with(cx, |project, cx| {
10789 let worktrees: Vec<_> = project.worktrees(cx).collect();
10790 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
10791 let id1 = worktrees[0].read(cx).id();
10792 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
10793 let id2 = worktrees[1].read(cx).id();
10794 (abs_path1, id1, abs_path2, id2)
10795 });
10796
10797 project.update(cx, |project, cx| {
10798 let abs_path = project1_abs_path.join("file1.txt");
10799 let found_path = project.find_project_path(abs_path, cx).unwrap();
10800 assert_eq!(found_path.worktree_id, project1_id);
10801 assert_eq!(&*found_path.path, rel_path("file1.txt"));
10802
10803 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
10804 let found_path = project.find_project_path(abs_path, cx).unwrap();
10805 assert_eq!(found_path.worktree_id, project1_id);
10806 assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
10807
10808 let abs_path = project2_abs_path.join("file3.txt");
10809 let found_path = project.find_project_path(abs_path, cx).unwrap();
10810 assert_eq!(found_path.worktree_id, project2_id);
10811 assert_eq!(&*found_path.path, rel_path("file3.txt"));
10812
10813 let abs_path = project1_abs_path.join("nonexistent.txt");
10814 let found_path = project.find_project_path(abs_path, cx);
10815 assert!(
10816 found_path.is_some(),
10817 "Should find project path for nonexistent file in worktree"
10818 );
10819
10820 // Test with an absolute path outside any worktree
10821 let abs_path = Path::new("/some/other/path");
10822 let found_path = project.find_project_path(abs_path, cx);
10823 assert!(
10824 found_path.is_none(),
10825 "Should not find project path for path outside any worktree"
10826 );
10827 });
10828}
10829
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two repositories: `/root/a` and `/root/b`. The `/root/b/script`
    // worktree lies *inside* the `/root/b` repository but has no `.git` of
    // its own.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories.
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the `script` worktree must not drop the `/root/b` repository,
    // since the `/root/b` worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree hosting the active repository should make the
    // project fall back to the remaining repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With every repository-bearing worktree removed, there is no active
    // repository left.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
10942
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    // Verifies that staging a file optimistically marks its hunks as
    // "removal pending" before the index write completes, and that the hunks
    // settle to fully-staged (and then disappear after a simulated commit).
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // Not yet transitioned: keep ticking.
            HasSecondaryHunk => {}
            // Optimistic state observed: stop ticking and assert below.
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11087
11088#[gpui::test]
11089async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11090 init_test(cx);
11091
11092 // Configure read_only_files setting
11093 cx.update(|cx| {
11094 cx.update_global::<SettingsStore, _>(|store, cx| {
11095 store.update_user_settings(cx, |settings| {
11096 settings.project.worktree.read_only_files = Some(vec![
11097 "**/generated/**".to_string(),
11098 "**/*.gen.rs".to_string(),
11099 ]);
11100 });
11101 });
11102 });
11103
11104 let fs = FakeFs::new(cx.background_executor.clone());
11105 fs.insert_tree(
11106 path!("/root"),
11107 json!({
11108 "src": {
11109 "main.rs": "fn main() {}",
11110 "types.gen.rs": "// Generated file",
11111 },
11112 "generated": {
11113 "schema.rs": "// Auto-generated schema",
11114 }
11115 }),
11116 )
11117 .await;
11118
11119 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11120
11121 // Open a regular file - should be read-write
11122 let regular_buffer = project
11123 .update(cx, |project, cx| {
11124 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11125 })
11126 .await
11127 .unwrap();
11128
11129 regular_buffer.read_with(cx, |buffer, _| {
11130 assert!(!buffer.read_only(), "Regular file should not be read-only");
11131 });
11132
11133 // Open a file matching *.gen.rs pattern - should be read-only
11134 let gen_buffer = project
11135 .update(cx, |project, cx| {
11136 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
11137 })
11138 .await
11139 .unwrap();
11140
11141 gen_buffer.read_with(cx, |buffer, _| {
11142 assert!(
11143 buffer.read_only(),
11144 "File matching *.gen.rs pattern should be read-only"
11145 );
11146 });
11147
11148 // Open a file in generated directory - should be read-only
11149 let generated_buffer = project
11150 .update(cx, |project, cx| {
11151 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11152 })
11153 .await
11154 .unwrap();
11155
11156 generated_buffer.read_with(cx, |buffer, _| {
11157 assert!(
11158 buffer.read_only(),
11159 "File in generated directory should be read-only"
11160 );
11161 });
11162}
11163
11164#[gpui::test]
11165async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11166 init_test(cx);
11167
11168 // Explicitly set read_only_files to empty (default behavior)
11169 cx.update(|cx| {
11170 cx.update_global::<SettingsStore, _>(|store, cx| {
11171 store.update_user_settings(cx, |settings| {
11172 settings.project.worktree.read_only_files = Some(vec![]);
11173 });
11174 });
11175 });
11176
11177 let fs = FakeFs::new(cx.background_executor.clone());
11178 fs.insert_tree(
11179 path!("/root"),
11180 json!({
11181 "src": {
11182 "main.rs": "fn main() {}",
11183 },
11184 "generated": {
11185 "schema.rs": "// Auto-generated schema",
11186 }
11187 }),
11188 )
11189 .await;
11190
11191 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11192
11193 // All files should be read-write when read_only_files is empty
11194 let main_buffer = project
11195 .update(cx, |project, cx| {
11196 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11197 })
11198 .await
11199 .unwrap();
11200
11201 main_buffer.read_with(cx, |buffer, _| {
11202 assert!(
11203 !buffer.read_only(),
11204 "Files should not be read-only when read_only_files is empty"
11205 );
11206 });
11207
11208 let generated_buffer = project
11209 .update(cx, |project, cx| {
11210 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11211 })
11212 .await
11213 .unwrap();
11214
11215 generated_buffer.read_with(cx, |buffer, _| {
11216 assert!(
11217 !buffer.read_only(),
11218 "Generated files should not be read-only when read_only_files is empty"
11219 );
11220 });
11221}
11222
11223#[gpui::test]
11224async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
11225 init_test(cx);
11226
11227 // Configure to make lock files read-only
11228 cx.update(|cx| {
11229 cx.update_global::<SettingsStore, _>(|store, cx| {
11230 store.update_user_settings(cx, |settings| {
11231 settings.project.worktree.read_only_files = Some(vec![
11232 "**/*.lock".to_string(),
11233 "**/package-lock.json".to_string(),
11234 ]);
11235 });
11236 });
11237 });
11238
11239 let fs = FakeFs::new(cx.background_executor.clone());
11240 fs.insert_tree(
11241 path!("/root"),
11242 json!({
11243 "Cargo.lock": "# Lock file",
11244 "Cargo.toml": "[package]",
11245 "package-lock.json": "{}",
11246 "package.json": "{}",
11247 }),
11248 )
11249 .await;
11250
11251 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11252
11253 // Cargo.lock should be read-only
11254 let cargo_lock = project
11255 .update(cx, |project, cx| {
11256 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
11257 })
11258 .await
11259 .unwrap();
11260
11261 cargo_lock.read_with(cx, |buffer, _| {
11262 assert!(buffer.read_only(), "Cargo.lock should be read-only");
11263 });
11264
11265 // Cargo.toml should be read-write
11266 let cargo_toml = project
11267 .update(cx, |project, cx| {
11268 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
11269 })
11270 .await
11271 .unwrap();
11272
11273 cargo_toml.read_with(cx, |buffer, _| {
11274 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
11275 });
11276
11277 // package-lock.json should be read-only
11278 let package_lock = project
11279 .update(cx, |project, cx| {
11280 project.open_local_buffer(path!("/root/package-lock.json"), cx)
11281 })
11282 .await
11283 .unwrap();
11284
11285 package_lock.read_with(cx, |buffer, _| {
11286 assert!(buffer.read_only(), "package-lock.json should be read-only");
11287 });
11288
11289 // package.json should be read-write
11290 let package_json = project
11291 .update(cx, |project, cx| {
11292 project.open_local_buffer(path!("/root/package.json"), cx)
11293 })
11294 .await
11295 .unwrap();
11296
11297 package_json.read_with(cx, |buffer, _| {
11298 assert!(!buffer.read_only(), "package.json should not be read-only");
11299 });
11300}