1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
13};
14use fs::FakeFs;
15use futures::{StreamExt, future};
16use git::{
17 GitHostingProviderRegistry,
18 repository::{RepoPath, repo_path},
19 status::{StatusCode, TrackedStatus},
20};
21use git2::RepositoryInitOptions;
22use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
23use itertools::Itertools;
24use language::{
25 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
26 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
27 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
28 ToolchainLister,
29 language_settings::{LanguageSettingsContent, language_settings},
30 rust_lang, tree_sitter_typescript,
31};
32use lsp::{
33 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
34 Uri, WillRenameFiles, notification::DidRenameFiles,
35};
36use parking_lot::Mutex;
37use paths::{config_dir, global_gitignore_path, tasks_file};
38use postage::stream::Stream as _;
39use pretty_assertions::{assert_eq, assert_matches};
40use rand::{Rng as _, rngs::StdRng};
41use serde_json::json;
42#[cfg(not(windows))]
43use std::os;
44use std::{
45 env, mem,
46 num::NonZeroU32,
47 ops::Range,
48 str::FromStr,
49 sync::{Arc, OnceLock},
50 task::Poll,
51};
52use sum_tree::SumTree;
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
97// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and thus
99// we assume that they are not supported out of the box.
100#[cfg(not(windows))]
101#[gpui::test]
102async fn test_symlinks(cx: &mut gpui::TestAppContext) {
103 init_test(cx);
104 cx.executor().allow_parking();
105
106 let dir = TempTree::new(json!({
107 "root": {
108 "apple": "",
109 "banana": {
110 "carrot": {
111 "date": "",
112 "endive": "",
113 }
114 },
115 "fennel": {
116 "grape": "",
117 }
118 }
119 }));
120
121 let root_link_path = dir.path().join("root_link");
122 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
123 os::unix::fs::symlink(
124 dir.path().join("root/fennel"),
125 dir.path().join("root/finnochio"),
126 )
127 .unwrap();
128
129 let project = Project::test(
130 Arc::new(RealFs::new(None, cx.executor())),
131 [root_link_path.as_ref()],
132 cx,
133 )
134 .await;
135
136 project.update(cx, |project, cx| {
137 let tree = project.worktrees(cx).next().unwrap().read(cx);
138 assert_eq!(tree.file_count(), 5);
139 assert_eq!(
140 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
141 tree.entry_for_path(rel_path("finnochio/grape"))
142 .unwrap()
143 .inode
144 );
145 });
146}
147
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root `.editorconfig` (root = true) alongside Zed project
    // settings, plus a nested `b/.editorconfig` that partially overrides the
    // root one.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake FS the project operates on.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a path in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json matches neither .editorconfig glob ("*.rs" / "*.js"),
        // so the .zed/settings.json tab_size of 8 applies.
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
246
247#[gpui::test]
248async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
249 init_test(cx);
250 cx.update(|cx| {
251 GitHostingProviderRegistry::default_global(cx);
252 git_hosting_providers::init(cx);
253 });
254
255 let fs = FakeFs::new(cx.executor());
256 let str_path = path!("/dir");
257 let path = Path::new(str_path);
258
259 fs.insert_tree(
260 path!("/dir"),
261 json!({
262 ".zed": {
263 "settings.json": r#"{
264 "git_hosting_providers": [
265 {
266 "provider": "gitlab",
267 "base_url": "https://google.com",
268 "name": "foo"
269 }
270 ]
271 }"#
272 },
273 }),
274 )
275 .await;
276
277 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
278 let (_worktree, _) =
279 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
280 cx.executor().run_until_parked();
281
282 cx.update(|cx| {
283 let provider = GitHostingProviderRegistry::global(cx);
284 assert!(
285 provider
286 .list_hosting_providers()
287 .into_iter()
288 .any(|provider| provider.name() == "foo")
289 );
290 });
291
292 fs.atomic_write(
293 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
294 "{}".into(),
295 )
296 .await
297 .unwrap();
298
299 cx.run_until_parked();
300
301 cx.update(|cx| {
302 let provider = GitHostingProviderRegistry::global(cx);
303 assert!(
304 !provider
305 .list_hosting_providers()
306 .into_iter()
307 .any(|provider| provider.name() == "foo")
308 );
309 });
310}
311
312#[gpui::test]
313async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
314 init_test(cx);
315 TaskStore::init(None);
316
317 let fs = FakeFs::new(cx.executor());
318 fs.insert_tree(
319 path!("/dir"),
320 json!({
321 ".zed": {
322 "settings.json": r#"{ "tab_size": 8 }"#,
323 "tasks.json": r#"[{
324 "label": "cargo check all",
325 "command": "cargo",
326 "args": ["check", "--all"]
327 },]"#,
328 },
329 "a": {
330 "a.rs": "fn a() {\n A\n}"
331 },
332 "b": {
333 ".zed": {
334 "settings.json": r#"{ "tab_size": 2 }"#,
335 "tasks.json": r#"[{
336 "label": "cargo check",
337 "command": "cargo",
338 "args": ["check"]
339 },]"#,
340 },
341 "b.rs": "fn b() {\n B\n}"
342 }
343 }),
344 )
345 .await;
346
347 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
348 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
349
350 cx.executor().run_until_parked();
351 let worktree_id = cx.update(|cx| {
352 project.update(cx, |project, cx| {
353 project.worktrees(cx).next().unwrap().read(cx).id()
354 })
355 });
356
357 let mut task_contexts = TaskContexts::default();
358 task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
359 let task_contexts = Arc::new(task_contexts);
360
361 let topmost_local_task_source_kind = TaskSourceKind::Worktree {
362 id: worktree_id,
363 directory_in_worktree: rel_path(".zed").into(),
364 id_base: "local worktree tasks from directory \".zed\"".into(),
365 };
366
367 let all_tasks = cx
368 .update(|cx| {
369 let tree = worktree.read(cx);
370
371 let file_a = File::for_entry(
372 tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
373 worktree.clone(),
374 ) as _;
375 let settings_a = language_settings(None, Some(&file_a), cx);
376 let file_b = File::for_entry(
377 tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
378 worktree.clone(),
379 ) as _;
380 let settings_b = language_settings(None, Some(&file_b), cx);
381
382 assert_eq!(settings_a.tab_size.get(), 8);
383 assert_eq!(settings_b.tab_size.get(), 2);
384
385 get_all_tasks(&project, task_contexts.clone(), cx)
386 })
387 .await
388 .into_iter()
389 .map(|(source_kind, task)| {
390 let resolved = task.resolved;
391 (
392 source_kind,
393 task.resolved_label,
394 resolved.args,
395 resolved.env,
396 )
397 })
398 .collect::<Vec<_>>();
399 assert_eq!(
400 all_tasks,
401 vec![
402 (
403 TaskSourceKind::Worktree {
404 id: worktree_id,
405 directory_in_worktree: rel_path("b/.zed").into(),
406 id_base: "local worktree tasks from directory \"b/.zed\"".into()
407 },
408 "cargo check".to_string(),
409 vec!["check".to_string()],
410 HashMap::default(),
411 ),
412 (
413 topmost_local_task_source_kind.clone(),
414 "cargo check all".to_string(),
415 vec!["check".to_string(), "--all".to_string()],
416 HashMap::default(),
417 ),
418 ]
419 );
420
421 let (_, resolved_task) = cx
422 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
423 .await
424 .into_iter()
425 .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
426 .expect("should have one global task");
427 project.update(cx, |project, cx| {
428 let task_inventory = project
429 .task_store
430 .read(cx)
431 .task_inventory()
432 .cloned()
433 .unwrap();
434 task_inventory.update(cx, |inventory, _| {
435 inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
436 inventory
437 .update_file_based_tasks(
438 TaskSettingsLocation::Global(tasks_file()),
439 Some(
440 &json!([{
441 "label": "cargo check unstable",
442 "command": "cargo",
443 "args": [
444 "check",
445 "--all",
446 "--all-targets"
447 ],
448 "env": {
449 "RUSTFLAGS": "-Zunstable-options"
450 }
451 }])
452 .to_string(),
453 ),
454 )
455 .unwrap();
456 });
457 });
458 cx.run_until_parked();
459
460 let all_tasks = cx
461 .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
462 .await
463 .into_iter()
464 .map(|(source_kind, task)| {
465 let resolved = task.resolved;
466 (
467 source_kind,
468 task.resolved_label,
469 resolved.args,
470 resolved.env,
471 )
472 })
473 .collect::<Vec<_>>();
474 assert_eq!(
475 all_tasks,
476 vec![
477 (
478 topmost_local_task_source_kind.clone(),
479 "cargo check all".to_string(),
480 vec!["check".to_string(), "--all".to_string()],
481 HashMap::default(),
482 ),
483 (
484 TaskSourceKind::Worktree {
485 id: worktree_id,
486 directory_in_worktree: rel_path("b/.zed").into(),
487 id_base: "local worktree tasks from directory \"b/.zed\"".into()
488 },
489 "cargo check".to_string(),
490 vec!["check".to_string()],
491 HashMap::default(),
492 ),
493 (
494 TaskSourceKind::AbsPath {
495 abs_path: paths::tasks_file().clone(),
496 id_base: "global tasks.json".into(),
497 },
498 "cargo check unstable".to_string(),
499 vec![
500 "check".to_string(),
501 "--all".to_string(),
502 "--all-targets".to_string(),
503 ],
504 HashMap::from_iter(Some((
505 "RUSTFLAGS".to_string(),
506 "-Zunstable-options".to_string()
507 ))),
508 ),
509 ]
510 );
511}
512
513#[gpui::test]
514async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
515 init_test(cx);
516 TaskStore::init(None);
517
518 let fs = FakeFs::new(cx.executor());
519 fs.insert_tree(
520 path!("/dir"),
521 json!({
522 ".zed": {
523 "tasks.json": r#"[{
524 "label": "test worktree root",
525 "command": "echo $ZED_WORKTREE_ROOT"
526 }]"#,
527 },
528 "a": {
529 "a.rs": "fn a() {\n A\n}"
530 },
531 }),
532 )
533 .await;
534
535 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
536 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
537
538 cx.executor().run_until_parked();
539 let worktree_id = cx.update(|cx| {
540 project.update(cx, |project, cx| {
541 project.worktrees(cx).next().unwrap().read(cx).id()
542 })
543 });
544
545 let active_non_worktree_item_tasks = cx
546 .update(|cx| {
547 get_all_tasks(
548 &project,
549 Arc::new(TaskContexts {
550 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
551 active_worktree_context: None,
552 other_worktree_contexts: Vec::new(),
553 lsp_task_sources: HashMap::default(),
554 latest_selection: None,
555 }),
556 cx,
557 )
558 })
559 .await;
560 assert!(
561 active_non_worktree_item_tasks.is_empty(),
562 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
563 );
564
565 let active_worktree_tasks = cx
566 .update(|cx| {
567 get_all_tasks(
568 &project,
569 Arc::new(TaskContexts {
570 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
571 active_worktree_context: Some((worktree_id, {
572 let mut worktree_context = TaskContext::default();
573 worktree_context
574 .task_variables
575 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
576 worktree_context
577 })),
578 other_worktree_contexts: Vec::new(),
579 lsp_task_sources: HashMap::default(),
580 latest_selection: None,
581 }),
582 cx,
583 )
584 })
585 .await;
586 assert_eq!(
587 active_worktree_tasks
588 .into_iter()
589 .map(|(source_kind, task)| {
590 let resolved = task.resolved;
591 (source_kind, resolved.command.unwrap())
592 })
593 .collect::<Vec<_>>(),
594 vec![(
595 TaskSourceKind::Worktree {
596 id: worktree_id,
597 directory_in_worktree: rel_path(".zed").into(),
598 id_base: "local worktree tasks from directory \".zed\"".into(),
599 },
600 "echo /dir".to_string(),
601 )]
602 );
603}
604
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: walks up to `depth` ancestors of a path and
    // returns the first directory containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects in one worktree, each rooted by its own
    // pyproject.toml and with its own .venv directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b (its pyproject.toml).
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
806
807#[gpui::test]
808async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
809 init_test(cx);
810
811 let fs = FakeFs::new(cx.executor());
812 fs.insert_tree(
813 path!("/dir"),
814 json!({
815 "test.rs": "const A: i32 = 1;",
816 "test2.rs": "",
817 "Cargo.toml": "a = 1",
818 "package.json": "{\"a\": 1}",
819 }),
820 )
821 .await;
822
823 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
824 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
825
826 let mut fake_rust_servers = language_registry.register_fake_lsp(
827 "Rust",
828 FakeLspAdapter {
829 name: "the-rust-language-server",
830 capabilities: lsp::ServerCapabilities {
831 completion_provider: Some(lsp::CompletionOptions {
832 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
833 ..Default::default()
834 }),
835 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
836 lsp::TextDocumentSyncOptions {
837 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
838 ..Default::default()
839 },
840 )),
841 ..Default::default()
842 },
843 ..Default::default()
844 },
845 );
846 let mut fake_json_servers = language_registry.register_fake_lsp(
847 "JSON",
848 FakeLspAdapter {
849 name: "the-json-language-server",
850 capabilities: lsp::ServerCapabilities {
851 completion_provider: Some(lsp::CompletionOptions {
852 trigger_characters: Some(vec![":".to_string()]),
853 ..Default::default()
854 }),
855 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
856 lsp::TextDocumentSyncOptions {
857 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
858 ..Default::default()
859 },
860 )),
861 ..Default::default()
862 },
863 ..Default::default()
864 },
865 );
866
867 // Open a buffer without an associated language server.
868 let (toml_buffer, _handle) = project
869 .update(cx, |project, cx| {
870 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
871 })
872 .await
873 .unwrap();
874
875 // Open a buffer with an associated language server before the language for it has been loaded.
876 let (rust_buffer, _handle2) = project
877 .update(cx, |project, cx| {
878 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
879 })
880 .await
881 .unwrap();
882 rust_buffer.update(cx, |buffer, _| {
883 assert_eq!(buffer.language().map(|l| l.name()), None);
884 });
885
886 // Now we add the languages to the project, and ensure they get assigned to all
887 // the relevant open buffers.
888 language_registry.add(json_lang());
889 language_registry.add(rust_lang());
890 cx.executor().run_until_parked();
891 rust_buffer.update(cx, |buffer, _| {
892 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
893 });
894
895 // A server is started up, and it is notified about Rust files.
896 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
897 assert_eq!(
898 fake_rust_server
899 .receive_notification::<lsp::notification::DidOpenTextDocument>()
900 .await
901 .text_document,
902 lsp::TextDocumentItem {
903 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
904 version: 0,
905 text: "const A: i32 = 1;".to_string(),
906 language_id: "rust".to_string(),
907 }
908 );
909
910 // The buffer is configured based on the language server's capabilities.
911 rust_buffer.update(cx, |buffer, _| {
912 assert_eq!(
913 buffer
914 .completion_triggers()
915 .iter()
916 .cloned()
917 .collect::<Vec<_>>(),
918 &[".".to_string(), "::".to_string()]
919 );
920 });
921 toml_buffer.update(cx, |buffer, _| {
922 assert!(buffer.completion_triggers().is_empty());
923 });
924
925 // Edit a buffer. The changes are reported to the language server.
926 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
927 assert_eq!(
928 fake_rust_server
929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
930 .await
931 .text_document,
932 lsp::VersionedTextDocumentIdentifier::new(
933 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
934 1
935 )
936 );
937
938 // Open a third buffer with a different associated language server.
939 let (json_buffer, _json_handle) = project
940 .update(cx, |project, cx| {
941 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
942 })
943 .await
944 .unwrap();
945
946 // A json language server is started up and is only notified about the json buffer.
947 let mut fake_json_server = fake_json_servers.next().await.unwrap();
948 assert_eq!(
949 fake_json_server
950 .receive_notification::<lsp::notification::DidOpenTextDocument>()
951 .await
952 .text_document,
953 lsp::TextDocumentItem {
954 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
955 version: 0,
956 text: "{\"a\": 1}".to_string(),
957 language_id: "json".to_string(),
958 }
959 );
960
961 // This buffer is configured based on the second language server's
962 // capabilities.
963 json_buffer.update(cx, |buffer, _| {
964 assert_eq!(
965 buffer
966 .completion_triggers()
967 .iter()
968 .cloned()
969 .collect::<Vec<_>>(),
970 &[":".to_string()]
971 );
972 });
973
974 // When opening another buffer whose language server is already running,
975 // it is also configured based on the existing language server's capabilities.
976 let (rust_buffer2, _handle4) = project
977 .update(cx, |project, cx| {
978 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
979 })
980 .await
981 .unwrap();
982 rust_buffer2.update(cx, |buffer, _| {
983 assert_eq!(
984 buffer
985 .completion_triggers()
986 .iter()
987 .cloned()
988 .collect::<Vec<_>>(),
989 &[".".to_string(), "::".to_string()]
990 );
991 });
992
993 // Changes are reported only to servers matching the buffer's language.
994 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
995 rust_buffer2.update(cx, |buffer, cx| {
996 buffer.edit([(0..0, "let x = 1;")], None, cx)
997 });
998 assert_eq!(
999 fake_rust_server
1000 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1001 .await
1002 .text_document,
1003 lsp::VersionedTextDocumentIdentifier::new(
1004 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1005 1
1006 )
1007 );
1008
1009 // Save notifications are reported to all servers.
1010 project
1011 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1012 .await
1013 .unwrap();
1014 assert_eq!(
1015 fake_rust_server
1016 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1017 .await
1018 .text_document,
1019 lsp::TextDocumentIdentifier::new(
1020 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1021 )
1022 );
1023 assert_eq!(
1024 fake_json_server
1025 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1026 .await
1027 .text_document,
1028 lsp::TextDocumentIdentifier::new(
1029 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1030 )
1031 );
1032
1033 // Renames are reported only to servers matching the buffer's language.
1034 fs.rename(
1035 Path::new(path!("/dir/test2.rs")),
1036 Path::new(path!("/dir/test3.rs")),
1037 Default::default(),
1038 )
1039 .await
1040 .unwrap();
1041 assert_eq!(
1042 fake_rust_server
1043 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1044 .await
1045 .text_document,
1046 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1047 );
1048 assert_eq!(
1049 fake_rust_server
1050 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1051 .await
1052 .text_document,
1053 lsp::TextDocumentItem {
1054 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1055 version: 0,
1056 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1057 language_id: "rust".to_string(),
1058 },
1059 );
1060
1061 rust_buffer2.update(cx, |buffer, cx| {
1062 buffer.update_diagnostics(
1063 LanguageServerId(0),
1064 DiagnosticSet::from_sorted_entries(
1065 vec![DiagnosticEntry {
1066 diagnostic: Default::default(),
1067 range: Anchor::MIN..Anchor::MAX,
1068 }],
1069 &buffer.snapshot(),
1070 ),
1071 cx,
1072 );
1073 assert_eq!(
1074 buffer
1075 .snapshot()
1076 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1077 .count(),
1078 1
1079 );
1080 });
1081
1082 // When the rename changes the extension of the file, the buffer gets closed on the old
1083 // language server and gets opened on the new one.
1084 fs.rename(
1085 Path::new(path!("/dir/test3.rs")),
1086 Path::new(path!("/dir/test3.json")),
1087 Default::default(),
1088 )
1089 .await
1090 .unwrap();
1091 assert_eq!(
1092 fake_rust_server
1093 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1094 .await
1095 .text_document,
1096 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1097 );
1098 assert_eq!(
1099 fake_json_server
1100 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1101 .await
1102 .text_document,
1103 lsp::TextDocumentItem {
1104 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1105 version: 0,
1106 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1107 language_id: "json".to_string(),
1108 },
1109 );
1110
1111 // We clear the diagnostics, since the language has changed.
1112 rust_buffer2.update(cx, |buffer, _| {
1113 assert_eq!(
1114 buffer
1115 .snapshot()
1116 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1117 .count(),
1118 0
1119 );
1120 });
1121
1122 // The renamed file's version resets after changing language server.
1123 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1124 assert_eq!(
1125 fake_json_server
1126 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1127 .await
1128 .text_document,
1129 lsp::VersionedTextDocumentIdentifier::new(
1130 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1131 1
1132 )
1133 );
1134
1135 // Restart language servers
1136 project.update(cx, |project, cx| {
1137 project.restart_language_servers_for_buffers(
1138 vec![rust_buffer.clone(), json_buffer.clone()],
1139 HashSet::default(),
1140 cx,
1141 );
1142 });
1143
1144 let mut rust_shutdown_requests = fake_rust_server
1145 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1146 let mut json_shutdown_requests = fake_json_server
1147 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1148 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1149
1150 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1151 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1152
1153 // Ensure rust document is reopened in new rust language server
1154 assert_eq!(
1155 fake_rust_server
1156 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1157 .await
1158 .text_document,
1159 lsp::TextDocumentItem {
1160 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1161 version: 0,
1162 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1163 language_id: "rust".to_string(),
1164 }
1165 );
1166
1167 // Ensure json documents are reopened in new json language server
1168 assert_set_eq!(
1169 [
1170 fake_json_server
1171 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1172 .await
1173 .text_document,
1174 fake_json_server
1175 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1176 .await
1177 .text_document,
1178 ],
1179 [
1180 lsp::TextDocumentItem {
1181 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1182 version: 0,
1183 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1184 language_id: "json".to_string(),
1185 },
1186 lsp::TextDocumentItem {
1187 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1188 version: 0,
1189 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1190 language_id: "json".to_string(),
1191 }
1192 ]
1193 );
1194
1195 // Close notifications are reported only to servers matching the buffer's language.
1196 cx.update(|_| drop(_json_handle));
1197 let close_message = lsp::DidCloseTextDocumentParams {
1198 text_document: lsp::TextDocumentIdentifier::new(
1199 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1200 ),
1201 };
1202 assert_eq!(
1203 fake_json_server
1204 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1205 .await,
1206 close_message,
1207 );
1208}
1209
1210#[gpui::test]
1211async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1212 init_test(cx);
1213
1214 let settings_json_contents = json!({
1215 "languages": {
1216 "Rust": {
1217 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1218 }
1219 },
1220 "lsp": {
1221 "my_fake_lsp": {
1222 "binary": {
1223 // file exists, so this is treated as a relative path
1224 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1225 }
1226 },
1227 "lsp_on_path": {
1228 "binary": {
1229 // file doesn't exist, so it will fall back on PATH env var
1230 "path": path!("lsp_on_path.exe").to_string(),
1231 }
1232 }
1233 },
1234 });
1235
1236 let fs = FakeFs::new(cx.executor());
1237 fs.insert_tree(
1238 path!("/the-root"),
1239 json!({
1240 ".zed": {
1241 "settings.json": settings_json_contents.to_string(),
1242 },
1243 ".relative_path": {
1244 "to": {
1245 "my_fake_lsp.exe": "",
1246 },
1247 },
1248 "src": {
1249 "main.rs": "",
1250 }
1251 }),
1252 )
1253 .await;
1254
1255 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1256 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1257 language_registry.add(rust_lang());
1258
1259 let mut my_fake_lsp = language_registry.register_fake_lsp(
1260 "Rust",
1261 FakeLspAdapter {
1262 name: "my_fake_lsp",
1263 ..Default::default()
1264 },
1265 );
1266 let mut lsp_on_path = language_registry.register_fake_lsp(
1267 "Rust",
1268 FakeLspAdapter {
1269 name: "lsp_on_path",
1270 ..Default::default()
1271 },
1272 );
1273
1274 cx.run_until_parked();
1275
1276 // Start the language server by opening a buffer with a compatible file extension.
1277 project
1278 .update(cx, |project, cx| {
1279 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1280 })
1281 .await
1282 .unwrap();
1283
1284 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1285 assert_eq!(
1286 lsp_path.to_string_lossy(),
1287 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1288 );
1289
1290 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1291 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1292}
1293
1294#[gpui::test]
1295async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1296 init_test(cx);
1297
1298 let settings_json_contents = json!({
1299 "languages": {
1300 "Rust": {
1301 "language_servers": ["tilde_lsp"]
1302 }
1303 },
1304 "lsp": {
1305 "tilde_lsp": {
1306 "binary": {
1307 "path": "~/.local/bin/rust-analyzer",
1308 }
1309 }
1310 },
1311 });
1312
1313 let fs = FakeFs::new(cx.executor());
1314 fs.insert_tree(
1315 path!("/root"),
1316 json!({
1317 ".zed": {
1318 "settings.json": settings_json_contents.to_string(),
1319 },
1320 "src": {
1321 "main.rs": "fn main() {}",
1322 }
1323 }),
1324 )
1325 .await;
1326
1327 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1328 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1329 language_registry.add(rust_lang());
1330
1331 let mut tilde_lsp = language_registry.register_fake_lsp(
1332 "Rust",
1333 FakeLspAdapter {
1334 name: "tilde_lsp",
1335 ..Default::default()
1336 },
1337 );
1338 cx.run_until_parked();
1339
1340 project
1341 .update(cx, |project, cx| {
1342 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1343 })
1344 .await
1345 .unwrap();
1346
1347 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1348 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1349 assert_eq!(
1350 lsp_path, expected_path,
1351 "Tilde path should expand to home directory"
1352 );
1353}
1354
// Verifies that file-system events are forwarded to a language server
// according to the `workspace/didChangeWatchedFiles` globs it registers,
// including globs that point into gitignored directories (which forces those
// directories to be loaded) and globs for paths outside the visible worktree.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Main worktree; `target` is gitignored, so its contents start out unloaded.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    // Directory outside the worktree, reachable only via go-to-definition.
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    // Another out-of-worktree directory, watched via a glob registered below.
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
            ]
        );
    });

    // Snapshot the directory-scan counter so we can measure how many extra
    // scans the watch registration below causes.
    let prev_read_dir_count = fs.read_dir_call_count();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register five watchers: an absolute file, a worktree glob, a glob into
    // the gitignored `target/y` dir, an out-of-worktree glob, and a relative
    // `**` glob that should match within the worktree.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the/stdlib/src/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("**/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .into_response()
        .unwrap();
    // Collect incoming change notifications, sorted by URI for stable asserts.
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    // Registration alone must not produce change events, only directory scans.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Ignore watches the test harness sets up for global config files.
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| {
        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
    });
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
                ("target/x", true),
                ("target/y", true),
                ("target/y/out", true),
                ("target/y/out/y.rs", true),
                ("target/z", true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    // NOTE(review): this path is "/the-stdlib/LICENSE" (hyphen), not the
    // "/the/stdlib" tree created above — TODO confirm whether the hyphenated
    // spelling is intentional. Either spelling matches no watcher glob, so the
    // expected events below are unaffected.
    fs.save(
        path!("/the-stdlib/LICENSE").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
1658
1659#[gpui::test]
1660async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1661 init_test(cx);
1662
1663 let fs = FakeFs::new(cx.executor());
1664 fs.insert_tree(
1665 path!("/dir"),
1666 json!({
1667 "a.rs": "let a = 1;",
1668 "b.rs": "let b = 2;"
1669 }),
1670 )
1671 .await;
1672
1673 let project = Project::test(
1674 fs,
1675 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1676 cx,
1677 )
1678 .await;
1679 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1680
1681 let buffer_a = project
1682 .update(cx, |project, cx| {
1683 project.open_local_buffer(path!("/dir/a.rs"), cx)
1684 })
1685 .await
1686 .unwrap();
1687 let buffer_b = project
1688 .update(cx, |project, cx| {
1689 project.open_local_buffer(path!("/dir/b.rs"), cx)
1690 })
1691 .await
1692 .unwrap();
1693
1694 lsp_store.update(cx, |lsp_store, cx| {
1695 lsp_store
1696 .update_diagnostics(
1697 LanguageServerId(0),
1698 lsp::PublishDiagnosticsParams {
1699 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1700 version: None,
1701 diagnostics: vec![lsp::Diagnostic {
1702 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1703 severity: Some(lsp::DiagnosticSeverity::ERROR),
1704 message: "error 1".to_string(),
1705 ..Default::default()
1706 }],
1707 },
1708 None,
1709 DiagnosticSourceKind::Pushed,
1710 &[],
1711 cx,
1712 )
1713 .unwrap();
1714 lsp_store
1715 .update_diagnostics(
1716 LanguageServerId(0),
1717 lsp::PublishDiagnosticsParams {
1718 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1719 version: None,
1720 diagnostics: vec![lsp::Diagnostic {
1721 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1722 severity: Some(DiagnosticSeverity::WARNING),
1723 message: "error 2".to_string(),
1724 ..Default::default()
1725 }],
1726 },
1727 None,
1728 DiagnosticSourceKind::Pushed,
1729 &[],
1730 cx,
1731 )
1732 .unwrap();
1733 });
1734
1735 buffer_a.update(cx, |buffer, _| {
1736 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1737 assert_eq!(
1738 chunks
1739 .iter()
1740 .map(|(s, d)| (s.as_str(), *d))
1741 .collect::<Vec<_>>(),
1742 &[
1743 ("let ", None),
1744 ("a", Some(DiagnosticSeverity::ERROR)),
1745 (" = 1;", None),
1746 ]
1747 );
1748 });
1749 buffer_b.update(cx, |buffer, _| {
1750 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1751 assert_eq!(
1752 chunks
1753 .iter()
1754 .map(|(s, d)| (s.as_str(), *d))
1755 .collect::<Vec<_>>(),
1756 &[
1757 ("let ", None),
1758 ("b", Some(DiagnosticSeverity::WARNING)),
1759 (" = 2;", None),
1760 ]
1761 );
1762 });
1763}
1764
1765#[gpui::test]
1766async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1767 init_test(cx);
1768
1769 let fs = FakeFs::new(cx.executor());
1770 fs.insert_tree(
1771 path!("/root"),
1772 json!({
1773 "dir": {
1774 ".git": {
1775 "HEAD": "ref: refs/heads/main",
1776 },
1777 ".gitignore": "b.rs",
1778 "a.rs": "let a = 1;",
1779 "b.rs": "let b = 2;",
1780 },
1781 "other.rs": "let b = c;"
1782 }),
1783 )
1784 .await;
1785
1786 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1787 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1788 let (worktree, _) = project
1789 .update(cx, |project, cx| {
1790 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1791 })
1792 .await
1793 .unwrap();
1794 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1795
1796 let (worktree, _) = project
1797 .update(cx, |project, cx| {
1798 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1799 })
1800 .await
1801 .unwrap();
1802 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1803
1804 let server_id = LanguageServerId(0);
1805 lsp_store.update(cx, |lsp_store, cx| {
1806 lsp_store
1807 .update_diagnostics(
1808 server_id,
1809 lsp::PublishDiagnosticsParams {
1810 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1811 version: None,
1812 diagnostics: vec![lsp::Diagnostic {
1813 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1814 severity: Some(lsp::DiagnosticSeverity::ERROR),
1815 message: "unused variable 'b'".to_string(),
1816 ..Default::default()
1817 }],
1818 },
1819 None,
1820 DiagnosticSourceKind::Pushed,
1821 &[],
1822 cx,
1823 )
1824 .unwrap();
1825 lsp_store
1826 .update_diagnostics(
1827 server_id,
1828 lsp::PublishDiagnosticsParams {
1829 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1830 version: None,
1831 diagnostics: vec![lsp::Diagnostic {
1832 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1833 severity: Some(lsp::DiagnosticSeverity::ERROR),
1834 message: "unknown variable 'c'".to_string(),
1835 ..Default::default()
1836 }],
1837 },
1838 None,
1839 DiagnosticSourceKind::Pushed,
1840 &[],
1841 cx,
1842 )
1843 .unwrap();
1844 });
1845
1846 let main_ignored_buffer = project
1847 .update(cx, |project, cx| {
1848 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1849 })
1850 .await
1851 .unwrap();
1852 main_ignored_buffer.update(cx, |buffer, _| {
1853 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1854 assert_eq!(
1855 chunks
1856 .iter()
1857 .map(|(s, d)| (s.as_str(), *d))
1858 .collect::<Vec<_>>(),
1859 &[
1860 ("let ", None),
1861 ("b", Some(DiagnosticSeverity::ERROR)),
1862 (" = 2;", None),
1863 ],
1864 "Gigitnored buffers should still get in-buffer diagnostics",
1865 );
1866 });
1867 let other_buffer = project
1868 .update(cx, |project, cx| {
1869 project.open_buffer((other_worktree_id, rel_path("")), cx)
1870 })
1871 .await
1872 .unwrap();
1873 other_buffer.update(cx, |buffer, _| {
1874 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1875 assert_eq!(
1876 chunks
1877 .iter()
1878 .map(|(s, d)| (s.as_str(), *d))
1879 .collect::<Vec<_>>(),
1880 &[
1881 ("let b = ", None),
1882 ("c", Some(DiagnosticSeverity::ERROR)),
1883 (";", None),
1884 ],
1885 "Buffers from hidden projects should still get in-buffer diagnostics"
1886 );
1887 });
1888
1889 project.update(cx, |project, cx| {
1890 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1891 assert_eq!(
1892 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1893 vec![(
1894 ProjectPath {
1895 worktree_id: main_worktree_id,
1896 path: rel_path("b.rs").into(),
1897 },
1898 server_id,
1899 DiagnosticSummary {
1900 error_count: 1,
1901 warning_count: 0,
1902 }
1903 )]
1904 );
1905 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1906 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1907 });
1908}
1909
// Verifies the project event sequence emitted while a language server runs a
// disk-based diagnostics pass: server added, progress started, diagnostics
// updated, progress finished — and that re-publishing empty diagnostics does
// not emit a redundant update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter's progress token is what classifies LSP progress reports as
    // "disk-based diagnostics" below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress on the configured token surfaces as a disk-based-diagnostics event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Diagnostics published before the buffer was opened are applied to it.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second empty publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2045
// Verifies that restarting a language server while a disk-based diagnostics
// pass is still in progress does not leave the project stuck in a
// "diagnostics running" state: the old server's unfinished progress is
// discarded and only the new server's progress is tracked.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Old server (id 0) is removed, new server (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2147
2148#[gpui::test]
2149async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2150 init_test(cx);
2151
2152 let fs = FakeFs::new(cx.executor());
2153 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2154
2155 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2156
2157 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2158 language_registry.add(rust_lang());
2159 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2160
2161 let (buffer, _) = project
2162 .update(cx, |project, cx| {
2163 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2164 })
2165 .await
2166 .unwrap();
2167
2168 // Publish diagnostics
2169 let fake_server = fake_servers.next().await.unwrap();
2170 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2171 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2172 version: None,
2173 diagnostics: vec![lsp::Diagnostic {
2174 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2175 severity: Some(lsp::DiagnosticSeverity::ERROR),
2176 message: "the message".to_string(),
2177 ..Default::default()
2178 }],
2179 });
2180
2181 cx.executor().run_until_parked();
2182 buffer.update(cx, |buffer, _| {
2183 assert_eq!(
2184 buffer
2185 .snapshot()
2186 .diagnostics_in_range::<_, usize>(0..1, false)
2187 .map(|entry| entry.diagnostic.message.clone())
2188 .collect::<Vec<_>>(),
2189 ["the message".to_string()]
2190 );
2191 });
2192 project.update(cx, |project, cx| {
2193 assert_eq!(
2194 project.diagnostic_summary(false, cx),
2195 DiagnosticSummary {
2196 error_count: 1,
2197 warning_count: 0,
2198 }
2199 );
2200 });
2201
2202 project.update(cx, |project, cx| {
2203 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2204 });
2205
2206 // The diagnostics are cleared.
2207 cx.executor().run_until_parked();
2208 buffer.update(cx, |buffer, _| {
2209 assert_eq!(
2210 buffer
2211 .snapshot()
2212 .diagnostics_in_range::<_, usize>(0..1, false)
2213 .map(|entry| entry.diagnostic.message.clone())
2214 .collect::<Vec<_>>(),
2215 Vec::<String>::new(),
2216 );
2217 });
2218 project.update(cx, |project, cx| {
2219 assert_eq!(
2220 project.diagnostic_summary(false, cx),
2221 DiagnosticSummary {
2222 error_count: 0,
2223 warning_count: 0,
2224 }
2225 );
2226 });
2227}
2228
#[gpui::test]
async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
    // Verifies that restarting a language server resets document versioning:
    // even if the old server instance reported diagnostics against a bogus,
    // far-future buffer version, the freshly started server must receive the
    // buffer via `didOpen` at version 0.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Before restarting the server, report diagnostics with an unknown buffer version.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(10000),
        diagnostics: Vec::new(),
    });
    cx.executor().run_until_parked();
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The restarted server re-opens the buffer; its version must start over at 0,
    // not continue from the stale version the old instance saw.
    let mut fake_server = fake_servers.next().await.unwrap();
    let notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document;
    assert_eq!(notification.version, 0);
}
2268
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    // Verifies that cancelling language-server work for a buffer sends a
    // `window/workDoneProgress/cancel` notification for the cancellable
    // progress token only — the non-cancellable token must be left alone.
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable token...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable token; only the latter should be cancelled below.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Exactly the cancellable progress token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2333
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that flipping the per-language `enable_language_server` setting
    // starts/stops only the affected language's server: disabling Rust stops
    // the Rust server (JS untouched); re-enabling Rust while disabling
    // JavaScript restarts the former and exits the latter.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts one server per language.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A second (fresh) Rust server comes up and re-opens the Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // The JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2451
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that LSP diagnostics published against an older document
    // version are translated through the buffer edits made since that
    // version: positions shift with insertions, overlapping diagnostics are
    // both surfaced, and disk-based diagnostics track subsequent edits.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Rows shift by 2 because "\n\n" was inserted at the top above.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider warning and the nested error both appear; the error
        // severity takes precedence on the overlapping "A" chunk below.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Results come back sorted by position regardless of publish order.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2743
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how zero-width diagnostic ranges are widened for display:
    // mid-line they extend forward one character; at end-of-line they extend
    // backward instead (asserted on the chunk boundaries below).
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =\n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        // Empty range in the middle of line 0.
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        // Empty range at the end of line 1.
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2821
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    // Verifies that diagnostics reported by different language servers for
    // the same path and range are kept separately and both counted in the
    // project's diagnostic summary.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // First server reports an error on the first word...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and a second server reports its own error on the same range.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new(path!("/dir/a.rs")).to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both diagnostics show up in the summary; one server's entry does
        // not replace the other's.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2882
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` correctly rebases edits computed by the
    // language server against an OLD document version onto the current buffer
    // contents, after the user has made intervening edits.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the edits
    // below will be issued against this (soon-to-be-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Translate server edits (expressed against `lsp_document_version`) into
    // buffer edits against the current content.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's intervening
    // comments while incorporating the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3037
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that a large rewrite-style LSP edit (as rust-analyzer sends for
    // merge-imports) is minimized by `edits_from_lsp` into the small set of
    // actual changes, instead of replacing the whole file.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four server edits collapse to two minimal buffer edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3148
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that `edits_from_lsp` tolerates a spec-violating edit pair
    // (an insertion at the same position FOLLOWING a replacement) and still
    // produces the intended result instead of mangling the buffer.
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3204
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` sanitizes malformed server edits:
    // out-of-order edits, an inverted range (start after end), and a range
    // extending past the end of the document all get normalized, producing
    // the same minimal edits as the well-formed case.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) is after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range ends at line 99, far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The sanitized result is the same minimal two-edit set.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3311
3312fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3313 buffer: &Buffer,
3314 range: Range<T>,
3315) -> Vec<(String, Option<DiagnosticSeverity>)> {
3316 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3317 for chunk in buffer.snapshot().chunks(range, true) {
3318 if chunks
3319 .last()
3320 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3321 {
3322 chunks.last_mut().unwrap().0.push_str(chunk.text);
3323 } else {
3324 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3325 }
3326 }
3327 chunks
3328}
3329
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition across files: a definition target outside the
    // project's visible worktree is opened via an invisible worktree, which is
    // released once the last reference to the target location is dropped.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside the worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // The server resolves the definition to a location in a.rs.
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs appears as an extra, invisible worktree while the definition
        // target is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns every worktree as (absolute path, is_visible).
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3430
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // A completion item that carries its own `text_edit` must use that edit's
    // range and `new_text`, taking precedence over both `insert_text` and `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Register a fake TypeScript server advertising completion support so the
    // project routes completion requests to it.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request first; the handler installed below is
    // awaited (`.next().await`) to confirm the request was actually served.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The edit replaces the trailing "fqn" (last 3 chars).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion must reflect the server's text edit exactly:
    // `new_text` from the edit, range covering the final 3 characters.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3514
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Exercises `CompletionList.itemDefaults.editRange`: the list-level default
    // range must be applied to items lacking their own `text_edit`, using the
    // item's `text_edit_text` when present and falling back to `label` otherwise.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request before installing the handler; `.next().await`
        // below confirms the handler actually ran.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the inserted text; the range comes from
        // the list-level default.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no edit text at all, the `label` is inserted verbatim; the
        // `insert_text` is ignored when the default edit range applies.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3652
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When neither the item nor the list defaults supply an edit range, the
    // replace range is inferred from the text around the cursor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` is used, and the inferred range covers the partial word
    // "fqn" (last 3 chars) before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // Falls back to `label`; the inferred range covers "cmp" before the
    // cursor, excluding the closing quote.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3759
3760#[gpui::test]
3761async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3762 init_test(cx);
3763
3764 let fs = FakeFs::new(cx.executor());
3765 fs.insert_tree(
3766 path!("/dir"),
3767 json!({
3768 "a.ts": "",
3769 }),
3770 )
3771 .await;
3772
3773 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3774
3775 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3776 language_registry.add(typescript_lang());
3777 let mut fake_language_servers = language_registry.register_fake_lsp(
3778 "TypeScript",
3779 FakeLspAdapter {
3780 capabilities: lsp::ServerCapabilities {
3781 completion_provider: Some(lsp::CompletionOptions {
3782 trigger_characters: Some(vec![":".to_string()]),
3783 ..Default::default()
3784 }),
3785 ..Default::default()
3786 },
3787 ..Default::default()
3788 },
3789 );
3790
3791 let (buffer, _handle) = project
3792 .update(cx, |p, cx| {
3793 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3794 })
3795 .await
3796 .unwrap();
3797
3798 let fake_server = fake_language_servers.next().await.unwrap();
3799 cx.executor().run_until_parked();
3800
3801 let text = "let a = b.fqn";
3802 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3803 let completions = project.update(cx, |project, cx| {
3804 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3805 });
3806
3807 fake_server
3808 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3809 Ok(Some(lsp::CompletionResponse::Array(vec![
3810 lsp::CompletionItem {
3811 label: "fullyQualifiedName?".into(),
3812 insert_text: Some("fully\rQualified\r\nName".into()),
3813 ..Default::default()
3814 },
3815 ])))
3816 })
3817 .next()
3818 .await;
3819 let completions = completions
3820 .await
3821 .unwrap()
3822 .into_iter()
3823 .flat_map(|response| response.completions)
3824 .collect::<Vec<_>>();
3825 assert_eq!(completions.len(), 1);
3826 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3827}
3828
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // A code action may carry no edits and instead resolve to a server-side
    // command. Applying it must execute that command, and any
    // `workspace/applyEdit` requests the server makes while executing must be
    // captured into the returned project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Server advertises code-action resolve support plus one executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    cx.executor().run_until_parked();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying resolve `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3971
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    // Renaming an entry into a not-yet-existing nested directory must create
    // the whole hierarchy; a second rename into an existing directory must
    // also succeed. File contents must be preserved in both cases.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // First rename: move into a three-level-deep directory that doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-fetch the entry id: the rename produced a new entry.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4079
4080#[gpui::test(iterations = 10)]
4081async fn test_save_file(cx: &mut gpui::TestAppContext) {
4082 init_test(cx);
4083
4084 let fs = FakeFs::new(cx.executor());
4085 fs.insert_tree(
4086 path!("/dir"),
4087 json!({
4088 "file1": "the old contents",
4089 }),
4090 )
4091 .await;
4092
4093 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4094 let buffer = project
4095 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4096 .await
4097 .unwrap();
4098 buffer.update(cx, |buffer, cx| {
4099 assert_eq!(buffer.text(), "the old contents");
4100 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4101 });
4102
4103 project
4104 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4105 .await
4106 .unwrap();
4107
4108 let new_text = fs
4109 .load(Path::new(path!("/dir/file1")))
4110 .await
4111 .unwrap()
4112 .replace("\r\n", "\n");
4113 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4114}
4115
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // A buffer created without a path has no language servers. Saving it
    // under a `.rs` name must spawn the Rust server and open the file in it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; no server should be associated yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a Rust filename gives the buffer a language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the spawned server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4195
4196#[gpui::test(iterations = 30)]
4197async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4198 init_test(cx);
4199
4200 let fs = FakeFs::new(cx.executor());
4201 fs.insert_tree(
4202 path!("/dir"),
4203 json!({
4204 "file1": "the original contents",
4205 }),
4206 )
4207 .await;
4208
4209 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4210 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4211 let buffer = project
4212 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4213 .await
4214 .unwrap();
4215
4216 // Change the buffer's file on disk, and then wait for the file change
4217 // to be detected by the worktree, so that the buffer starts reloading.
4218 fs.save(
4219 path!("/dir/file1").as_ref(),
4220 &"the first contents".into(),
4221 Default::default(),
4222 )
4223 .await
4224 .unwrap();
4225 worktree.next_event(cx).await;
4226
4227 // Change the buffer's file again. Depending on the random seed, the
4228 // previous file change may still be in progress.
4229 fs.save(
4230 path!("/dir/file1").as_ref(),
4231 &"the second contents".into(),
4232 Default::default(),
4233 )
4234 .await
4235 .unwrap();
4236 worktree.next_event(cx).await;
4237
4238 cx.executor().run_until_parked();
4239 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4240 buffer.read_with(cx, |buffer, _| {
4241 assert_eq!(buffer.text(), on_disk_text);
4242 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4243 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4244 });
4245}
4246
4247#[gpui::test(iterations = 30)]
4248async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4249 init_test(cx);
4250
4251 let fs = FakeFs::new(cx.executor());
4252 fs.insert_tree(
4253 path!("/dir"),
4254 json!({
4255 "file1": "the original contents",
4256 }),
4257 )
4258 .await;
4259
4260 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4261 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4262 let buffer = project
4263 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4264 .await
4265 .unwrap();
4266
4267 // Change the buffer's file on disk, and then wait for the file change
4268 // to be detected by the worktree, so that the buffer starts reloading.
4269 fs.save(
4270 path!("/dir/file1").as_ref(),
4271 &"the first contents".into(),
4272 Default::default(),
4273 )
4274 .await
4275 .unwrap();
4276 worktree.next_event(cx).await;
4277
4278 cx.executor()
4279 .spawn(cx.executor().simulate_random_delay())
4280 .await;
4281
4282 // Perform a noop edit, causing the buffer's version to increase.
4283 buffer.update(cx, |buffer, cx| {
4284 buffer.edit([(0..0, " ")], None, cx);
4285 buffer.undo(cx);
4286 });
4287
4288 cx.executor().run_until_parked();
4289 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4290 buffer.read_with(cx, |buffer, _| {
4291 let buffer_text = buffer.text();
4292 if buffer_text == on_disk_text {
4293 assert!(
4294 !buffer.is_dirty() && !buffer.has_conflict(),
4295 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4296 );
4297 }
4298 // If the file change occurred while the buffer was processing the first
4299 // change, the buffer will be in a conflicting state.
4300 else {
4301 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4302 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4303 }
4304 });
4305}
4306
4307#[gpui::test]
4308async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4309 init_test(cx);
4310
4311 let fs = FakeFs::new(cx.executor());
4312 fs.insert_tree(
4313 path!("/dir"),
4314 json!({
4315 "file1": "the old contents",
4316 }),
4317 )
4318 .await;
4319
4320 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4321 let buffer = project
4322 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4323 .await
4324 .unwrap();
4325 buffer.update(cx, |buffer, cx| {
4326 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4327 });
4328
4329 project
4330 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4331 .await
4332 .unwrap();
4333
4334 let new_text = fs
4335 .load(Path::new(path!("/dir/file1")))
4336 .await
4337 .unwrap()
4338 .replace("\r\n", "\n");
4339 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4340}
4341
4342#[gpui::test]
4343async fn test_save_as(cx: &mut gpui::TestAppContext) {
4344 init_test(cx);
4345
4346 let fs = FakeFs::new(cx.executor());
4347 fs.insert_tree("/dir", json!({})).await;
4348
4349 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4350
4351 let languages = project.update(cx, |project, _| project.languages().clone());
4352 languages.add(rust_lang());
4353
4354 let buffer = project.update(cx, |project, cx| {
4355 project.create_local_buffer("", None, false, cx)
4356 });
4357 buffer.update(cx, |buffer, cx| {
4358 buffer.edit([(0..0, "abc")], None, cx);
4359 assert!(buffer.is_dirty());
4360 assert!(!buffer.has_conflict());
4361 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4362 });
4363 project
4364 .update(cx, |project, cx| {
4365 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4366 let path = ProjectPath {
4367 worktree_id,
4368 path: rel_path("file1.rs").into(),
4369 };
4370 project.save_buffer_as(buffer.clone(), path, cx)
4371 })
4372 .await
4373 .unwrap();
4374 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4375
4376 cx.executor().run_until_parked();
4377 buffer.update(cx, |buffer, cx| {
4378 assert_eq!(
4379 buffer.file().unwrap().full_path(cx),
4380 Path::new("dir/file1.rs")
4381 );
4382 assert!(!buffer.is_dirty());
4383 assert!(!buffer.has_conflict());
4384 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4385 });
4386
4387 let opened_buffer = project
4388 .update(cx, |project, cx| {
4389 project.open_local_buffer("/dir/file1.rs", cx)
4390 })
4391 .await
4392 .unwrap();
4393 assert_eq!(opened_buffer, buffer);
4394}
4395
4396#[gpui::test]
4397async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4398 init_test(cx);
4399
4400 let fs = FakeFs::new(cx.executor());
4401 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4402
4403 fs.insert_tree(
4404 path!("/dir"),
4405 json!({
4406 "data_a.txt": "data about a"
4407 }),
4408 )
4409 .await;
4410
4411 let buffer = project
4412 .update(cx, |project, cx| {
4413 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4414 })
4415 .await
4416 .unwrap();
4417
4418 buffer.update(cx, |buffer, cx| {
4419 buffer.edit([(11..12, "b")], None, cx);
4420 });
4421
4422 // Save buffer's contents as a new file and confirm that the buffer's now
4423 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4424 // file associated with the buffer has now been updated to `data_b.txt`
4425 project
4426 .update(cx, |project, cx| {
4427 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4428 let new_path = ProjectPath {
4429 worktree_id,
4430 path: rel_path("data_b.txt").into(),
4431 };
4432
4433 project.save_buffer_as(buffer.clone(), new_path, cx)
4434 })
4435 .await
4436 .unwrap();
4437
4438 buffer.update(cx, |buffer, cx| {
4439 assert_eq!(
4440 buffer.file().unwrap().full_path(cx),
4441 Path::new("dir/data_b.txt")
4442 )
4443 });
4444
4445 // Open the original `data_a.txt` file, confirming that its contents are
4446 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4447 let original_buffer = project
4448 .update(cx, |project, cx| {
4449 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4450 })
4451 .await
4452 .unwrap();
4453
4454 original_buffer.update(cx, |buffer, cx| {
4455 assert_eq!(buffer.text(), "data about a");
4456 assert_eq!(
4457 buffer.file().unwrap().full_path(cx),
4458 Path::new("dir/data_a.txt")
4459 )
4460 });
4461}
4462
4463#[gpui::test(retries = 5)]
4464async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4465 use worktree::WorktreeModelHandle as _;
4466
4467 init_test(cx);
4468 cx.executor().allow_parking();
4469
4470 let dir = TempTree::new(json!({
4471 "a": {
4472 "file1": "",
4473 "file2": "",
4474 "file3": "",
4475 },
4476 "b": {
4477 "c": {
4478 "file4": "",
4479 "file5": "",
4480 }
4481 }
4482 }));
4483
4484 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
4485
4486 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4487 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
4488 async move { buffer.await.unwrap() }
4489 };
4490 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4491 project.update(cx, |project, cx| {
4492 let tree = project.worktrees(cx).next().unwrap();
4493 tree.read(cx)
4494 .entry_for_path(rel_path(path))
4495 .unwrap_or_else(|| panic!("no entry for path {}", path))
4496 .id
4497 })
4498 };
4499
4500 let buffer2 = buffer_for_path("a/file2", cx).await;
4501 let buffer3 = buffer_for_path("a/file3", cx).await;
4502 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4503 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4504
4505 let file2_id = id_for_path("a/file2", cx);
4506 let file3_id = id_for_path("a/file3", cx);
4507 let file4_id = id_for_path("b/c/file4", cx);
4508
4509 // Create a remote copy of this worktree.
4510 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4511 let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
4512
4513 let updates = Arc::new(Mutex::new(Vec::new()));
4514 tree.update(cx, |tree, cx| {
4515 let updates = updates.clone();
4516 tree.observe_updates(0, cx, move |update| {
4517 updates.lock().push(update);
4518 async { true }
4519 });
4520 });
4521
4522 let remote = cx.update(|cx| {
4523 Worktree::remote(
4524 0,
4525 ReplicaId::REMOTE_SERVER,
4526 metadata,
4527 project.read(cx).client().into(),
4528 project.read(cx).path_style(cx),
4529 cx,
4530 )
4531 });
4532
4533 cx.executor().run_until_parked();
4534
4535 cx.update(|cx| {
4536 assert!(!buffer2.read(cx).is_dirty());
4537 assert!(!buffer3.read(cx).is_dirty());
4538 assert!(!buffer4.read(cx).is_dirty());
4539 assert!(!buffer5.read(cx).is_dirty());
4540 });
4541
4542 // Rename and delete files and directories.
4543 tree.flush_fs_events(cx).await;
4544 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4545 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4546 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4547 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4548 tree.flush_fs_events(cx).await;
4549
4550 cx.update(|app| {
4551 assert_eq!(
4552 tree.read(app).paths().collect::<Vec<_>>(),
4553 vec![
4554 rel_path("a"),
4555 rel_path("a/file1"),
4556 rel_path("a/file2.new"),
4557 rel_path("b"),
4558 rel_path("d"),
4559 rel_path("d/file3"),
4560 rel_path("d/file4"),
4561 ]
4562 );
4563 });
4564
4565 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
4566 assert_eq!(id_for_path("d/file3", cx), file3_id);
4567 assert_eq!(id_for_path("d/file4", cx), file4_id);
4568
4569 cx.update(|cx| {
4570 assert_eq!(
4571 buffer2.read(cx).file().unwrap().path().as_ref(),
4572 rel_path("a/file2.new")
4573 );
4574 assert_eq!(
4575 buffer3.read(cx).file().unwrap().path().as_ref(),
4576 rel_path("d/file3")
4577 );
4578 assert_eq!(
4579 buffer4.read(cx).file().unwrap().path().as_ref(),
4580 rel_path("d/file4")
4581 );
4582 assert_eq!(
4583 buffer5.read(cx).file().unwrap().path().as_ref(),
4584 rel_path("b/c/file5")
4585 );
4586
4587 assert_matches!(
4588 buffer2.read(cx).file().unwrap().disk_state(),
4589 DiskState::Present { .. }
4590 );
4591 assert_matches!(
4592 buffer3.read(cx).file().unwrap().disk_state(),
4593 DiskState::Present { .. }
4594 );
4595 assert_matches!(
4596 buffer4.read(cx).file().unwrap().disk_state(),
4597 DiskState::Present { .. }
4598 );
4599 assert_eq!(
4600 buffer5.read(cx).file().unwrap().disk_state(),
4601 DiskState::Deleted
4602 );
4603 });
4604
4605 // Update the remote worktree. Check that it becomes consistent with the
4606 // local worktree.
4607 cx.executor().run_until_parked();
4608
4609 remote.update(cx, |remote, _| {
4610 for update in updates.lock().drain(..) {
4611 remote.as_remote_mut().unwrap().update_from_remote(update);
4612 }
4613 });
4614 cx.executor().run_until_parked();
4615 remote.update(cx, |remote, _| {
4616 assert_eq!(
4617 remote.paths().collect::<Vec<_>>(),
4618 vec![
4619 rel_path("a"),
4620 rel_path("a/file1"),
4621 rel_path("a/file2.new"),
4622 rel_path("b"),
4623 rel_path("d"),
4624 rel_path("d/file3"),
4625 rel_path("d/file4"),
4626 ]
4627 );
4628 });
4629}
4630
4631#[gpui::test(iterations = 10)]
4632async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4633 init_test(cx);
4634
4635 let fs = FakeFs::new(cx.executor());
4636 fs.insert_tree(
4637 path!("/dir"),
4638 json!({
4639 "a": {
4640 "file1": "",
4641 }
4642 }),
4643 )
4644 .await;
4645
4646 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4647 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4648 let tree_id = tree.update(cx, |tree, _| tree.id());
4649
4650 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4651 project.update(cx, |project, cx| {
4652 let tree = project.worktrees(cx).next().unwrap();
4653 tree.read(cx)
4654 .entry_for_path(rel_path(path))
4655 .unwrap_or_else(|| panic!("no entry for path {}", path))
4656 .id
4657 })
4658 };
4659
4660 let dir_id = id_for_path("a", cx);
4661 let file_id = id_for_path("a/file1", cx);
4662 let buffer = project
4663 .update(cx, |p, cx| {
4664 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4665 })
4666 .await
4667 .unwrap();
4668 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4669
4670 project
4671 .update(cx, |project, cx| {
4672 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4673 })
4674 .unwrap()
4675 .await
4676 .into_included()
4677 .unwrap();
4678 cx.executor().run_until_parked();
4679
4680 assert_eq!(id_for_path("b", cx), dir_id);
4681 assert_eq!(id_for_path("b/file1", cx), file_id);
4682 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4683}
4684
4685#[gpui::test]
4686async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4687 init_test(cx);
4688
4689 let fs = FakeFs::new(cx.executor());
4690 fs.insert_tree(
4691 "/dir",
4692 json!({
4693 "a.txt": "a-contents",
4694 "b.txt": "b-contents",
4695 }),
4696 )
4697 .await;
4698
4699 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4700
4701 // Spawn multiple tasks to open paths, repeating some paths.
4702 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4703 (
4704 p.open_local_buffer("/dir/a.txt", cx),
4705 p.open_local_buffer("/dir/b.txt", cx),
4706 p.open_local_buffer("/dir/a.txt", cx),
4707 )
4708 });
4709
4710 let buffer_a_1 = buffer_a_1.await.unwrap();
4711 let buffer_a_2 = buffer_a_2.await.unwrap();
4712 let buffer_b = buffer_b.await.unwrap();
4713 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4714 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4715
4716 // There is only one buffer per path.
4717 let buffer_a_id = buffer_a_1.entity_id();
4718 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4719
4720 // Open the same path again while it is still open.
4721 drop(buffer_a_1);
4722 let buffer_a_3 = project
4723 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4724 .await
4725 .unwrap();
4726
4727 // There's still only one buffer per path.
4728 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4729}
4730
4731#[gpui::test]
4732async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4733 init_test(cx);
4734
4735 let fs = FakeFs::new(cx.executor());
4736 fs.insert_tree(
4737 path!("/dir"),
4738 json!({
4739 "file1": "abc",
4740 "file2": "def",
4741 "file3": "ghi",
4742 }),
4743 )
4744 .await;
4745
4746 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4747
4748 let buffer1 = project
4749 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4750 .await
4751 .unwrap();
4752 let events = Arc::new(Mutex::new(Vec::new()));
4753
4754 // initially, the buffer isn't dirty.
4755 buffer1.update(cx, |buffer, cx| {
4756 cx.subscribe(&buffer1, {
4757 let events = events.clone();
4758 move |_, _, event, _| match event {
4759 BufferEvent::Operation { .. } => {}
4760 _ => events.lock().push(event.clone()),
4761 }
4762 })
4763 .detach();
4764
4765 assert!(!buffer.is_dirty());
4766 assert!(events.lock().is_empty());
4767
4768 buffer.edit([(1..2, "")], None, cx);
4769 });
4770
4771 // after the first edit, the buffer is dirty, and emits a dirtied event.
4772 buffer1.update(cx, |buffer, cx| {
4773 assert!(buffer.text() == "ac");
4774 assert!(buffer.is_dirty());
4775 assert_eq!(
4776 *events.lock(),
4777 &[
4778 language::BufferEvent::Edited,
4779 language::BufferEvent::DirtyChanged
4780 ]
4781 );
4782 events.lock().clear();
4783 buffer.did_save(
4784 buffer.version(),
4785 buffer.file().unwrap().disk_state().mtime(),
4786 cx,
4787 );
4788 });
4789
4790 // after saving, the buffer is not dirty, and emits a saved event.
4791 buffer1.update(cx, |buffer, cx| {
4792 assert!(!buffer.is_dirty());
4793 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4794 events.lock().clear();
4795
4796 buffer.edit([(1..1, "B")], None, cx);
4797 buffer.edit([(2..2, "D")], None, cx);
4798 });
4799
4800 // after editing again, the buffer is dirty, and emits another dirty event.
4801 buffer1.update(cx, |buffer, cx| {
4802 assert!(buffer.text() == "aBDc");
4803 assert!(buffer.is_dirty());
4804 assert_eq!(
4805 *events.lock(),
4806 &[
4807 language::BufferEvent::Edited,
4808 language::BufferEvent::DirtyChanged,
4809 language::BufferEvent::Edited,
4810 ],
4811 );
4812 events.lock().clear();
4813
4814 // After restoring the buffer to its previously-saved state,
4815 // the buffer is not considered dirty anymore.
4816 buffer.edit([(1..3, "")], None, cx);
4817 assert!(buffer.text() == "ac");
4818 assert!(!buffer.is_dirty());
4819 });
4820
4821 assert_eq!(
4822 *events.lock(),
4823 &[
4824 language::BufferEvent::Edited,
4825 language::BufferEvent::DirtyChanged
4826 ]
4827 );
4828
4829 // When a file is deleted, it is not considered dirty.
4830 let events = Arc::new(Mutex::new(Vec::new()));
4831 let buffer2 = project
4832 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4833 .await
4834 .unwrap();
4835 buffer2.update(cx, |_, cx| {
4836 cx.subscribe(&buffer2, {
4837 let events = events.clone();
4838 move |_, _, event, _| match event {
4839 BufferEvent::Operation { .. } => {}
4840 _ => events.lock().push(event.clone()),
4841 }
4842 })
4843 .detach();
4844 });
4845
4846 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4847 .await
4848 .unwrap();
4849 cx.executor().run_until_parked();
4850 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4851 assert_eq!(
4852 mem::take(&mut *events.lock()),
4853 &[language::BufferEvent::FileHandleChanged]
4854 );
4855
4856 // Buffer becomes dirty when edited.
4857 buffer2.update(cx, |buffer, cx| {
4858 buffer.edit([(2..3, "")], None, cx);
4859 assert_eq!(buffer.is_dirty(), true);
4860 });
4861 assert_eq!(
4862 mem::take(&mut *events.lock()),
4863 &[
4864 language::BufferEvent::Edited,
4865 language::BufferEvent::DirtyChanged
4866 ]
4867 );
4868
4869 // Buffer becomes clean again when all of its content is removed, because
4870 // the file was deleted.
4871 buffer2.update(cx, |buffer, cx| {
4872 buffer.edit([(0..2, "")], None, cx);
4873 assert_eq!(buffer.is_empty(), true);
4874 assert_eq!(buffer.is_dirty(), false);
4875 });
4876 assert_eq!(
4877 *events.lock(),
4878 &[
4879 language::BufferEvent::Edited,
4880 language::BufferEvent::DirtyChanged
4881 ]
4882 );
4883
4884 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4885 let events = Arc::new(Mutex::new(Vec::new()));
4886 let buffer3 = project
4887 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4888 .await
4889 .unwrap();
4890 buffer3.update(cx, |_, cx| {
4891 cx.subscribe(&buffer3, {
4892 let events = events.clone();
4893 move |_, _, event, _| match event {
4894 BufferEvent::Operation { .. } => {}
4895 _ => events.lock().push(event.clone()),
4896 }
4897 })
4898 .detach();
4899 });
4900
4901 buffer3.update(cx, |buffer, cx| {
4902 buffer.edit([(0..0, "x")], None, cx);
4903 });
4904 events.lock().clear();
4905 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4906 .await
4907 .unwrap();
4908 cx.executor().run_until_parked();
4909 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4910 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4911}
4912
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its file changing on disk:
    // - a clean buffer is reloaded, applying a diff so anchors stay on the
    //   text they originally marked;
    // - a dirty buffer is left untouched but flagged as conflicted.
    init_test(cx);

    // `ˇ` markers record offsets at which anchors will be created.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor should now resolve to the marked offset in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4995
4996#[gpui::test]
4997async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4998 init_test(cx);
4999
5000 let fs = FakeFs::new(cx.executor());
5001 fs.insert_tree(
5002 path!("/dir"),
5003 json!({
5004 "file1": "a\nb\nc\n",
5005 "file2": "one\r\ntwo\r\nthree\r\n",
5006 }),
5007 )
5008 .await;
5009
5010 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5011 let buffer1 = project
5012 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5013 .await
5014 .unwrap();
5015 let buffer2 = project
5016 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5017 .await
5018 .unwrap();
5019
5020 buffer1.update(cx, |buffer, _| {
5021 assert_eq!(buffer.text(), "a\nb\nc\n");
5022 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5023 });
5024 buffer2.update(cx, |buffer, _| {
5025 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5026 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5027 });
5028
5029 // Change a file's line endings on disk from unix to windows. The buffer's
5030 // state updates correctly.
5031 fs.save(
5032 path!("/dir/file1").as_ref(),
5033 &"aaa\nb\nc\n".into(),
5034 LineEnding::Windows,
5035 )
5036 .await
5037 .unwrap();
5038 cx.executor().run_until_parked();
5039 buffer1.update(cx, |buffer, _| {
5040 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5041 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5042 });
5043
5044 // Save a file with windows line endings. The file is written correctly.
5045 buffer2.update(cx, |buffer, cx| {
5046 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5047 });
5048 project
5049 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5050 .await
5051 .unwrap();
5052 assert_eq!(
5053 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5054 "one\r\ntwo\r\nthree\r\nfour\r\n",
5055 );
5056}
5057
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Publishes LSP diagnostics whose `related_information` cross-references
    // hint-severity entries, and verifies that they are grouped: each primary
    // diagnostic plus its related hints shares one `group_id`, hints are
    // deduplicated against their primaries, and `diagnostic_group` returns a
    // group's entries in buffer order.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publish-diagnostics payload containing two primary diagnostics
    // ("error 1" with one related hint, "error 2" with two) plus the
    // standalone hint diagnostics that point back at their primaries — the
    // shape rust-analyzer produces.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the payload through the same path a language server would use.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries over the whole buffer, in position order: "error 2"'s group
    // is 0 and "error 1"'s is 1; exactly one entry per group is primary.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 in isolation: both hints followed by the primary "error 2".
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 in isolation: the primary "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5317
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the LSP file-operation protocol around renames: when a server
    // registers `willRename`/`didRename` filters matching the file, renaming
    // an entry sends `workspace/willRenameFiles` first (whose returned
    // WorkspaceEdit is applied), then a `workspace/didRenameFiles`
    // notification after the rename completes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers: all `.rs` files plus all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    cx.executor().run_until_parked();
    // Kick off the rename of `one.rs` -> `three.rs`; it will block on the
    // server's willRenameFiles response below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; the project is
    // expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // willRenameFiles carries the old and new URIs before the
                    // rename actually happens on disk.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The willRenameFiles handler ran and returned the expected edit.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5454
5455#[gpui::test]
5456async fn test_rename(cx: &mut gpui::TestAppContext) {
5457 // hi
5458 init_test(cx);
5459
5460 let fs = FakeFs::new(cx.executor());
5461 fs.insert_tree(
5462 path!("/dir"),
5463 json!({
5464 "one.rs": "const ONE: usize = 1;",
5465 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
5466 }),
5467 )
5468 .await;
5469
5470 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5471
5472 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5473 language_registry.add(rust_lang());
5474 let mut fake_servers = language_registry.register_fake_lsp(
5475 "Rust",
5476 FakeLspAdapter {
5477 capabilities: lsp::ServerCapabilities {
5478 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
5479 prepare_provider: Some(true),
5480 work_done_progress_options: Default::default(),
5481 })),
5482 ..Default::default()
5483 },
5484 ..Default::default()
5485 },
5486 );
5487
5488 let (buffer, _handle) = project
5489 .update(cx, |project, cx| {
5490 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
5491 })
5492 .await
5493 .unwrap();
5494
5495 let fake_server = fake_servers.next().await.unwrap();
5496 cx.executor().run_until_parked();
5497
5498 let response = project.update(cx, |project, cx| {
5499 project.prepare_rename(buffer.clone(), 7, cx)
5500 });
5501 fake_server
5502 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
5503 assert_eq!(
5504 params.text_document.uri.as_str(),
5505 uri!("file:///dir/one.rs")
5506 );
5507 assert_eq!(params.position, lsp::Position::new(0, 7));
5508 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
5509 lsp::Position::new(0, 6),
5510 lsp::Position::new(0, 9),
5511 ))))
5512 })
5513 .next()
5514 .await
5515 .unwrap();
5516 let response = response.await.unwrap();
5517 let PrepareRenameResponse::Success(range) = response else {
5518 panic!("{:?}", response);
5519 };
5520 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
5521 assert_eq!(range, 6..9);
5522
5523 let response = project.update(cx, |project, cx| {
5524 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
5525 });
5526 fake_server
5527 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
5528 assert_eq!(
5529 params.text_document_position.text_document.uri.as_str(),
5530 uri!("file:///dir/one.rs")
5531 );
5532 assert_eq!(
5533 params.text_document_position.position,
5534 lsp::Position::new(0, 7)
5535 );
5536 assert_eq!(params.new_name, "THREE");
5537 Ok(Some(lsp::WorkspaceEdit {
5538 changes: Some(
5539 [
5540 (
5541 lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
5542 vec![lsp::TextEdit::new(
5543 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
5544 "THREE".to_string(),
5545 )],
5546 ),
5547 (
5548 lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
5549 vec![
5550 lsp::TextEdit::new(
5551 lsp::Range::new(
5552 lsp::Position::new(0, 24),
5553 lsp::Position::new(0, 27),
5554 ),
5555 "THREE".to_string(),
5556 ),
5557 lsp::TextEdit::new(
5558 lsp::Range::new(
5559 lsp::Position::new(0, 35),
5560 lsp::Position::new(0, 38),
5561 ),
5562 "THREE".to_string(),
5563 ),
5564 ],
5565 ),
5566 ]
5567 .into_iter()
5568 .collect(),
5569 ),
5570 ..Default::default()
5571 }))
5572 })
5573 .next()
5574 .await
5575 .unwrap();
5576 let mut transaction = response.await.unwrap().0;
5577 assert_eq!(transaction.len(), 2);
5578 assert_eq!(
5579 transaction
5580 .remove_entry(&buffer)
5581 .unwrap()
5582 .0
5583 .update(cx, |buffer, _| buffer.text()),
5584 "const THREE: usize = 1;"
5585 );
5586 assert_eq!(
5587 transaction
5588 .into_keys()
5589 .next()
5590 .unwrap()
5591 .update(cx, |buffer, _| buffer.text()),
5592 "const TWO: usize = one::THREE + one::THREE;"
5593 );
5594}
5595
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies project-wide text search, both against on-disk contents and
    // against unsaved in-memory buffer edits.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive whole-word search for "TWO" matches the definition in
    // two.rs and the reference in three.rs (byte ranges within each file).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) to introduce two new "TWO"
    // occurrences; search must reflect the dirty buffer contents.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5672
5673#[gpui::test]
5674async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5675 init_test(cx);
5676
5677 let search_query = "file";
5678
5679 let fs = FakeFs::new(cx.executor());
5680 fs.insert_tree(
5681 path!("/dir"),
5682 json!({
5683 "one.rs": r#"// Rust file one"#,
5684 "one.ts": r#"// TypeScript file one"#,
5685 "two.rs": r#"// Rust file two"#,
5686 "two.ts": r#"// TypeScript file two"#,
5687 }),
5688 )
5689 .await;
5690 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5691
5692 assert!(
5693 search(
5694 &project,
5695 SearchQuery::text(
5696 search_query,
5697 false,
5698 true,
5699 false,
5700 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5701 Default::default(),
5702 false,
5703 None
5704 )
5705 .unwrap(),
5706 cx
5707 )
5708 .await
5709 .unwrap()
5710 .is_empty(),
5711 "If no inclusions match, no files should be returned"
5712 );
5713
5714 assert_eq!(
5715 search(
5716 &project,
5717 SearchQuery::text(
5718 search_query,
5719 false,
5720 true,
5721 false,
5722 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5723 Default::default(),
5724 false,
5725 None
5726 )
5727 .unwrap(),
5728 cx
5729 )
5730 .await
5731 .unwrap(),
5732 HashMap::from_iter([
5733 (path!("dir/one.rs").to_string(), vec![8..12]),
5734 (path!("dir/two.rs").to_string(), vec![8..12]),
5735 ]),
5736 "Rust only search should give only Rust files"
5737 );
5738
5739 assert_eq!(
5740 search(
5741 &project,
5742 SearchQuery::text(
5743 search_query,
5744 false,
5745 true,
5746 false,
5747 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5748 .unwrap(),
5749 Default::default(),
5750 false,
5751 None,
5752 )
5753 .unwrap(),
5754 cx
5755 )
5756 .await
5757 .unwrap(),
5758 HashMap::from_iter([
5759 (path!("dir/one.ts").to_string(), vec![14..18]),
5760 (path!("dir/two.ts").to_string(), vec![14..18]),
5761 ]),
5762 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5763 );
5764
5765 assert_eq!(
5766 search(
5767 &project,
5768 SearchQuery::text(
5769 search_query,
5770 false,
5771 true,
5772 false,
5773 PathMatcher::new(
5774 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5775 PathStyle::local()
5776 )
5777 .unwrap(),
5778 Default::default(),
5779 false,
5780 None,
5781 )
5782 .unwrap(),
5783 cx
5784 )
5785 .await
5786 .unwrap(),
5787 HashMap::from_iter([
5788 (path!("dir/two.ts").to_string(), vec![14..18]),
5789 (path!("dir/one.rs").to_string(), vec![8..12]),
5790 (path!("dir/one.ts").to_string(), vec![14..18]),
5791 (path!("dir/two.rs").to_string(), vec![8..12]),
5792 ]),
5793 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5794 );
5795}
5796
5797#[gpui::test]
5798async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5799 init_test(cx);
5800
5801 let search_query = "file";
5802
5803 let fs = FakeFs::new(cx.executor());
5804 fs.insert_tree(
5805 path!("/dir"),
5806 json!({
5807 "one.rs": r#"// Rust file one"#,
5808 "one.ts": r#"// TypeScript file one"#,
5809 "two.rs": r#"// Rust file two"#,
5810 "two.ts": r#"// TypeScript file two"#,
5811 }),
5812 )
5813 .await;
5814 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5815
5816 assert_eq!(
5817 search(
5818 &project,
5819 SearchQuery::text(
5820 search_query,
5821 false,
5822 true,
5823 false,
5824 Default::default(),
5825 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5826 false,
5827 None,
5828 )
5829 .unwrap(),
5830 cx
5831 )
5832 .await
5833 .unwrap(),
5834 HashMap::from_iter([
5835 (path!("dir/one.rs").to_string(), vec![8..12]),
5836 (path!("dir/one.ts").to_string(), vec![14..18]),
5837 (path!("dir/two.rs").to_string(), vec![8..12]),
5838 (path!("dir/two.ts").to_string(), vec![14..18]),
5839 ]),
5840 "If no exclusions match, all files should be returned"
5841 );
5842
5843 assert_eq!(
5844 search(
5845 &project,
5846 SearchQuery::text(
5847 search_query,
5848 false,
5849 true,
5850 false,
5851 Default::default(),
5852 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5853 false,
5854 None,
5855 )
5856 .unwrap(),
5857 cx
5858 )
5859 .await
5860 .unwrap(),
5861 HashMap::from_iter([
5862 (path!("dir/one.ts").to_string(), vec![14..18]),
5863 (path!("dir/two.ts").to_string(), vec![14..18]),
5864 ]),
5865 "Rust exclusion search should give only TypeScript files"
5866 );
5867
5868 assert_eq!(
5869 search(
5870 &project,
5871 SearchQuery::text(
5872 search_query,
5873 false,
5874 true,
5875 false,
5876 Default::default(),
5877 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5878 .unwrap(),
5879 false,
5880 None,
5881 )
5882 .unwrap(),
5883 cx
5884 )
5885 .await
5886 .unwrap(),
5887 HashMap::from_iter([
5888 (path!("dir/one.rs").to_string(), vec![8..12]),
5889 (path!("dir/two.rs").to_string(), vec![8..12]),
5890 ]),
5891 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5892 );
5893
5894 assert!(
5895 search(
5896 &project,
5897 SearchQuery::text(
5898 search_query,
5899 false,
5900 true,
5901 false,
5902 Default::default(),
5903 PathMatcher::new(
5904 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5905 PathStyle::local(),
5906 )
5907 .unwrap(),
5908 false,
5909 None,
5910 )
5911 .unwrap(),
5912 cx
5913 )
5914 .await
5915 .unwrap()
5916 .is_empty(),
5917 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5918 );
5919}
5920
5921#[gpui::test]
5922async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5923 init_test(cx);
5924
5925 let search_query = "file";
5926
5927 let fs = FakeFs::new(cx.executor());
5928 fs.insert_tree(
5929 path!("/dir"),
5930 json!({
5931 "one.rs": r#"// Rust file one"#,
5932 "one.ts": r#"// TypeScript file one"#,
5933 "two.rs": r#"// Rust file two"#,
5934 "two.ts": r#"// TypeScript file two"#,
5935 }),
5936 )
5937 .await;
5938
5939 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5940 let path_style = PathStyle::local();
5941 let _buffer = project.update(cx, |project, cx| {
5942 project.create_local_buffer("file", None, false, cx)
5943 });
5944
5945 assert_eq!(
5946 search(
5947 &project,
5948 SearchQuery::text(
5949 search_query,
5950 false,
5951 true,
5952 false,
5953 Default::default(),
5954 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5955 false,
5956 None,
5957 )
5958 .unwrap(),
5959 cx
5960 )
5961 .await
5962 .unwrap(),
5963 HashMap::from_iter([
5964 (path!("dir/one.rs").to_string(), vec![8..12]),
5965 (path!("dir/one.ts").to_string(), vec![14..18]),
5966 (path!("dir/two.rs").to_string(), vec![8..12]),
5967 (path!("dir/two.ts").to_string(), vec![14..18]),
5968 ]),
5969 "If no exclusions match, all files should be returned"
5970 );
5971
5972 assert_eq!(
5973 search(
5974 &project,
5975 SearchQuery::text(
5976 search_query,
5977 false,
5978 true,
5979 false,
5980 Default::default(),
5981 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5982 false,
5983 None,
5984 )
5985 .unwrap(),
5986 cx
5987 )
5988 .await
5989 .unwrap(),
5990 HashMap::from_iter([
5991 (path!("dir/one.ts").to_string(), vec![14..18]),
5992 (path!("dir/two.ts").to_string(), vec![14..18]),
5993 ]),
5994 "Rust exclusion search should give only TypeScript files"
5995 );
5996
5997 assert_eq!(
5998 search(
5999 &project,
6000 SearchQuery::text(
6001 search_query,
6002 false,
6003 true,
6004 false,
6005 Default::default(),
6006 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6007 false,
6008 None,
6009 )
6010 .unwrap(),
6011 cx
6012 )
6013 .await
6014 .unwrap(),
6015 HashMap::from_iter([
6016 (path!("dir/one.rs").to_string(), vec![8..12]),
6017 (path!("dir/two.rs").to_string(), vec![8..12]),
6018 ]),
6019 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6020 );
6021
6022 assert!(
6023 search(
6024 &project,
6025 SearchQuery::text(
6026 search_query,
6027 false,
6028 true,
6029 false,
6030 Default::default(),
6031 PathMatcher::new(
6032 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6033 PathStyle::local(),
6034 )
6035 .unwrap(),
6036 false,
6037 None,
6038 )
6039 .unwrap(),
6040 cx
6041 )
6042 .await
6043 .unwrap()
6044 .is_empty(),
6045 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6046 );
6047}
6048
6049#[gpui::test]
6050async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6051 init_test(cx);
6052
6053 let search_query = "file";
6054
6055 let fs = FakeFs::new(cx.executor());
6056 fs.insert_tree(
6057 path!("/dir"),
6058 json!({
6059 "one.rs": r#"// Rust file one"#,
6060 "one.ts": r#"// TypeScript file one"#,
6061 "two.rs": r#"// Rust file two"#,
6062 "two.ts": r#"// TypeScript file two"#,
6063 }),
6064 )
6065 .await;
6066 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6067 assert!(
6068 search(
6069 &project,
6070 SearchQuery::text(
6071 search_query,
6072 false,
6073 true,
6074 false,
6075 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6076 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6077 false,
6078 None,
6079 )
6080 .unwrap(),
6081 cx
6082 )
6083 .await
6084 .unwrap()
6085 .is_empty(),
6086 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6087 );
6088
6089 assert!(
6090 search(
6091 &project,
6092 SearchQuery::text(
6093 search_query,
6094 false,
6095 true,
6096 false,
6097 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6098 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6099 false,
6100 None,
6101 )
6102 .unwrap(),
6103 cx
6104 )
6105 .await
6106 .unwrap()
6107 .is_empty(),
6108 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6109 );
6110
6111 assert!(
6112 search(
6113 &project,
6114 SearchQuery::text(
6115 search_query,
6116 false,
6117 true,
6118 false,
6119 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6120 .unwrap(),
6121 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6122 .unwrap(),
6123 false,
6124 None,
6125 )
6126 .unwrap(),
6127 cx
6128 )
6129 .await
6130 .unwrap()
6131 .is_empty(),
6132 "Non-matching inclusions and exclusions should not change that."
6133 );
6134
6135 assert_eq!(
6136 search(
6137 &project,
6138 SearchQuery::text(
6139 search_query,
6140 false,
6141 true,
6142 false,
6143 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6144 .unwrap(),
6145 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6146 .unwrap(),
6147 false,
6148 None,
6149 )
6150 .unwrap(),
6151 cx
6152 )
6153 .await
6154 .unwrap(),
6155 HashMap::from_iter([
6156 (path!("dir/one.ts").to_string(), vec![14..18]),
6157 (path!("dir/two.ts").to_string(), vec![14..18]),
6158 ]),
6159 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6160 );
6161}
6162
6163#[gpui::test]
6164async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6165 init_test(cx);
6166
6167 let fs = FakeFs::new(cx.executor());
6168 fs.insert_tree(
6169 path!("/worktree-a"),
6170 json!({
6171 "haystack.rs": r#"// NEEDLE"#,
6172 "haystack.ts": r#"// NEEDLE"#,
6173 }),
6174 )
6175 .await;
6176 fs.insert_tree(
6177 path!("/worktree-b"),
6178 json!({
6179 "haystack.rs": r#"// NEEDLE"#,
6180 "haystack.ts": r#"// NEEDLE"#,
6181 }),
6182 )
6183 .await;
6184
6185 let path_style = PathStyle::local();
6186 let project = Project::test(
6187 fs.clone(),
6188 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6189 cx,
6190 )
6191 .await;
6192
6193 assert_eq!(
6194 search(
6195 &project,
6196 SearchQuery::text(
6197 "NEEDLE",
6198 false,
6199 true,
6200 false,
6201 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6202 Default::default(),
6203 true,
6204 None,
6205 )
6206 .unwrap(),
6207 cx
6208 )
6209 .await
6210 .unwrap(),
6211 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6212 "should only return results from included worktree"
6213 );
6214 assert_eq!(
6215 search(
6216 &project,
6217 SearchQuery::text(
6218 "NEEDLE",
6219 false,
6220 true,
6221 false,
6222 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6223 Default::default(),
6224 true,
6225 None,
6226 )
6227 .unwrap(),
6228 cx
6229 )
6230 .await
6231 .unwrap(),
6232 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6233 "should only return results from included worktree"
6234 );
6235
6236 assert_eq!(
6237 search(
6238 &project,
6239 SearchQuery::text(
6240 "NEEDLE",
6241 false,
6242 true,
6243 false,
6244 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6245 Default::default(),
6246 false,
6247 None,
6248 )
6249 .unwrap(),
6250 cx
6251 )
6252 .await
6253 .unwrap(),
6254 HashMap::from_iter([
6255 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6256 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6257 ]),
6258 "should return results from both worktrees"
6259 );
6260}
6261
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Checks how project search treats gitignored entries:
    // 1. by default, files under ignored dirs (`target`, `node_modules`) are skipped;
    // 2. with the fourth `SearchQuery::text` flag set (flipped to `true`
    //    below), every ignored file is searched as well;
    // 3. include/exclude path matchers still apply to ignored files.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created for each query — presumably so
    // worktree state accumulated by the previous scan can't influence the
    // next result; confirm whether reusing one project would also work.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Even with ignored files included, include/exclude matchers narrow the
    // candidate set: only non-TS files under the prettier directory remain.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6386
6387#[gpui::test]
6388async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6389 init_test(cx);
6390
6391 let fs = FakeFs::new(cx.executor());
6392 fs.insert_tree(
6393 path!("/dir"),
6394 json!({
6395 "one.rs": "// ПРИВЕТ? привет!",
6396 "two.rs": "// ПРИВЕТ.",
6397 "three.rs": "// привет",
6398 }),
6399 )
6400 .await;
6401 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6402 let unicode_case_sensitive_query = SearchQuery::text(
6403 "привет",
6404 false,
6405 true,
6406 false,
6407 Default::default(),
6408 Default::default(),
6409 false,
6410 None,
6411 );
6412 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6413 assert_eq!(
6414 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6415 .await
6416 .unwrap(),
6417 HashMap::from_iter([
6418 (path!("dir/one.rs").to_string(), vec![17..29]),
6419 (path!("dir/three.rs").to_string(), vec![3..15]),
6420 ])
6421 );
6422
6423 let unicode_case_insensitive_query = SearchQuery::text(
6424 "привет",
6425 false,
6426 false,
6427 false,
6428 Default::default(),
6429 Default::default(),
6430 false,
6431 None,
6432 );
6433 assert_matches!(
6434 unicode_case_insensitive_query,
6435 Ok(SearchQuery::Regex { .. })
6436 );
6437 assert_eq!(
6438 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6439 .await
6440 .unwrap(),
6441 HashMap::from_iter([
6442 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6443 (path!("dir/two.rs").to_string(), vec![3..15]),
6444 (path!("dir/three.rs").to_string(), vec![3..15]),
6445 ])
6446 );
6447
6448 assert_eq!(
6449 search(
6450 &project,
6451 SearchQuery::text(
6452 "привет.",
6453 false,
6454 false,
6455 false,
6456 Default::default(),
6457 Default::default(),
6458 false,
6459 None,
6460 )
6461 .unwrap(),
6462 cx
6463 )
6464 .await
6465 .unwrap(),
6466 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6467 );
6468}
6469
6470#[gpui::test]
6471async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6472 init_test(cx);
6473
6474 let fs = FakeFs::new(cx.executor());
6475 fs.insert_tree(
6476 "/one/two",
6477 json!({
6478 "three": {
6479 "a.txt": "",
6480 "four": {}
6481 },
6482 "c.rs": ""
6483 }),
6484 )
6485 .await;
6486
6487 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6488 project
6489 .update(cx, |project, cx| {
6490 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6491 project.create_entry((id, rel_path("b..")), true, cx)
6492 })
6493 .await
6494 .unwrap()
6495 .into_included()
6496 .unwrap();
6497
6498 assert_eq!(
6499 fs.paths(true),
6500 vec![
6501 PathBuf::from(path!("/")),
6502 PathBuf::from(path!("/one")),
6503 PathBuf::from(path!("/one/two")),
6504 PathBuf::from(path!("/one/two/c.rs")),
6505 PathBuf::from(path!("/one/two/three")),
6506 PathBuf::from(path!("/one/two/three/a.txt")),
6507 PathBuf::from(path!("/one/two/three/b..")),
6508 PathBuf::from(path!("/one/two/three/four")),
6509 ]
6510 );
6511}
6512
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Registers four fake language servers for the same `tsx` buffer and
    // issues a single hover request:
    // - TypeScriptServer and TailwindServer respond with hover content;
    // - ESLintServer responds with `None` (dropped from the aggregate);
    // - NoHoverCapabilitiesServer advertises no hover capability and must
    //   never receive the request at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // All four adapters target the same language; only the last one omits the
    // hover capability.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each server to start, then install its hover handler keyed by
    // server name.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                // Responds, but with no hover content.
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                // Handler is installed only so that reaching it panics.
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Block until every capable server has actually served the hover request
    // before inspecting the aggregated result.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6667
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover response consisting solely of empty / whitespace-only parts
    // must produce no hover results rather than blank content.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Every hover part is empty or whitespace-only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until the fake server has actually served the request before
    // checking the (empty) aggregate.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6741
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // When a code-action request is restricted to specific kinds, actions of
    // other kinds returned by the server must be filtered out of the result.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one kind is
    // requested below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            // Restrict the request to organize-imports actions only.
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Wait until the fake server has actually served the request.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The SOURCE_FIX_ALL action must have been filtered out.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6820
6821#[gpui::test]
6822async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6823 init_test(cx);
6824
6825 let fs = FakeFs::new(cx.executor());
6826 fs.insert_tree(
6827 path!("/dir"),
6828 json!({
6829 "a.tsx": "a",
6830 }),
6831 )
6832 .await;
6833
6834 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6835
6836 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6837 language_registry.add(tsx_lang());
6838 let language_server_names = [
6839 "TypeScriptServer",
6840 "TailwindServer",
6841 "ESLintServer",
6842 "NoActionsCapabilitiesServer",
6843 ];
6844
6845 let mut language_server_rxs = [
6846 language_registry.register_fake_lsp(
6847 "tsx",
6848 FakeLspAdapter {
6849 name: language_server_names[0],
6850 capabilities: lsp::ServerCapabilities {
6851 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6852 ..lsp::ServerCapabilities::default()
6853 },
6854 ..FakeLspAdapter::default()
6855 },
6856 ),
6857 language_registry.register_fake_lsp(
6858 "tsx",
6859 FakeLspAdapter {
6860 name: language_server_names[1],
6861 capabilities: lsp::ServerCapabilities {
6862 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6863 ..lsp::ServerCapabilities::default()
6864 },
6865 ..FakeLspAdapter::default()
6866 },
6867 ),
6868 language_registry.register_fake_lsp(
6869 "tsx",
6870 FakeLspAdapter {
6871 name: language_server_names[2],
6872 capabilities: lsp::ServerCapabilities {
6873 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6874 ..lsp::ServerCapabilities::default()
6875 },
6876 ..FakeLspAdapter::default()
6877 },
6878 ),
6879 language_registry.register_fake_lsp(
6880 "tsx",
6881 FakeLspAdapter {
6882 name: language_server_names[3],
6883 capabilities: lsp::ServerCapabilities {
6884 code_action_provider: None,
6885 ..lsp::ServerCapabilities::default()
6886 },
6887 ..FakeLspAdapter::default()
6888 },
6889 ),
6890 ];
6891
6892 let (buffer, _handle) = project
6893 .update(cx, |p, cx| {
6894 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6895 })
6896 .await
6897 .unwrap();
6898 cx.executor().run_until_parked();
6899
6900 let mut servers_with_actions_requests = HashMap::default();
6901 for i in 0..language_server_names.len() {
6902 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6903 panic!(
6904 "Failed to get language server #{i} with name {}",
6905 &language_server_names[i]
6906 )
6907 });
6908 let new_server_name = new_server.server.name();
6909
6910 assert!(
6911 !servers_with_actions_requests.contains_key(&new_server_name),
6912 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6913 );
6914 match new_server_name.0.as_ref() {
6915 "TailwindServer" | "TypeScriptServer" => {
6916 servers_with_actions_requests.insert(
6917 new_server_name.clone(),
6918 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6919 move |_, _| {
6920 let name = new_server_name.clone();
6921 async move {
6922 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6923 lsp::CodeAction {
6924 title: format!("{name} code action"),
6925 ..lsp::CodeAction::default()
6926 },
6927 )]))
6928 }
6929 },
6930 ),
6931 );
6932 }
6933 "ESLintServer" => {
6934 servers_with_actions_requests.insert(
6935 new_server_name,
6936 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6937 |_, _| async move { Ok(None) },
6938 ),
6939 );
6940 }
6941 "NoActionsCapabilitiesServer" => {
6942 let _never_handled = new_server
6943 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6944 panic!(
6945 "Should not call for code actions server with no corresponding capabilities"
6946 )
6947 });
6948 }
6949 unexpected => panic!("Unexpected server name: {unexpected}"),
6950 }
6951 }
6952
6953 let code_actions_task = project.update(cx, |project, cx| {
6954 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6955 });
6956
6957 // cx.run_until_parked();
6958 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6959 |mut code_actions_request| async move {
6960 code_actions_request
6961 .next()
6962 .await
6963 .expect("All code actions requests should have been triggered")
6964 },
6965 ))
6966 .await;
6967 assert_eq!(
6968 vec!["TailwindServer code action", "TypeScriptServer code action"],
6969 code_actions_task
6970 .await
6971 .unwrap()
6972 .unwrap()
6973 .into_iter()
6974 .map(|code_action| code_action.lsp_action.title().to_owned())
6975 .sorted()
6976 .collect::<Vec<_>>(),
6977 "Should receive code actions responses from all related servers with hover capabilities"
6978 );
6979}
6980
6981#[gpui::test]
6982async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6983 init_test(cx);
6984
6985 let fs = FakeFs::new(cx.executor());
6986 fs.insert_tree(
6987 "/dir",
6988 json!({
6989 "a.rs": "let a = 1;",
6990 "b.rs": "let b = 2;",
6991 "c.rs": "let c = 2;",
6992 }),
6993 )
6994 .await;
6995
6996 let project = Project::test(
6997 fs,
6998 [
6999 "/dir/a.rs".as_ref(),
7000 "/dir/b.rs".as_ref(),
7001 "/dir/c.rs".as_ref(),
7002 ],
7003 cx,
7004 )
7005 .await;
7006
7007 // check the initial state and get the worktrees
7008 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7009 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7010 assert_eq!(worktrees.len(), 3);
7011
7012 let worktree_a = worktrees[0].read(cx);
7013 let worktree_b = worktrees[1].read(cx);
7014 let worktree_c = worktrees[2].read(cx);
7015
7016 // check they start in the right order
7017 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7018 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7019 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7020
7021 (
7022 worktrees[0].clone(),
7023 worktrees[1].clone(),
7024 worktrees[2].clone(),
7025 )
7026 });
7027
7028 // move first worktree to after the second
7029 // [a, b, c] -> [b, a, c]
7030 project
7031 .update(cx, |project, cx| {
7032 let first = worktree_a.read(cx);
7033 let second = worktree_b.read(cx);
7034 project.move_worktree(first.id(), second.id(), cx)
7035 })
7036 .expect("moving first after second");
7037
7038 // check the state after moving
7039 project.update(cx, |project, cx| {
7040 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7041 assert_eq!(worktrees.len(), 3);
7042
7043 let first = worktrees[0].read(cx);
7044 let second = worktrees[1].read(cx);
7045 let third = worktrees[2].read(cx);
7046
7047 // check they are now in the right order
7048 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7049 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7050 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7051 });
7052
7053 // move the second worktree to before the first
7054 // [b, a, c] -> [a, b, c]
7055 project
7056 .update(cx, |project, cx| {
7057 let second = worktree_a.read(cx);
7058 let first = worktree_b.read(cx);
7059 project.move_worktree(first.id(), second.id(), cx)
7060 })
7061 .expect("moving second before first");
7062
7063 // check the state after moving
7064 project.update(cx, |project, cx| {
7065 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7066 assert_eq!(worktrees.len(), 3);
7067
7068 let first = worktrees[0].read(cx);
7069 let second = worktrees[1].read(cx);
7070 let third = worktrees[2].read(cx);
7071
7072 // check they are now in the right order
7073 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7074 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7075 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7076 });
7077
7078 // move the second worktree to after the third
7079 // [a, b, c] -> [a, c, b]
7080 project
7081 .update(cx, |project, cx| {
7082 let second = worktree_b.read(cx);
7083 let third = worktree_c.read(cx);
7084 project.move_worktree(second.id(), third.id(), cx)
7085 })
7086 .expect("moving second after third");
7087
7088 // check the state after moving
7089 project.update(cx, |project, cx| {
7090 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7091 assert_eq!(worktrees.len(), 3);
7092
7093 let first = worktrees[0].read(cx);
7094 let second = worktrees[1].read(cx);
7095 let third = worktrees[2].read(cx);
7096
7097 // check they are now in the right order
7098 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7099 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7100 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7101 });
7102
7103 // move the third worktree to before the second
7104 // [a, c, b] -> [a, b, c]
7105 project
7106 .update(cx, |project, cx| {
7107 let third = worktree_c.read(cx);
7108 let second = worktree_b.read(cx);
7109 project.move_worktree(third.id(), second.id(), cx)
7110 })
7111 .expect("moving third before second");
7112
7113 // check the state after moving
7114 project.update(cx, |project, cx| {
7115 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7116 assert_eq!(worktrees.len(), 3);
7117
7118 let first = worktrees[0].read(cx);
7119 let second = worktrees[1].read(cx);
7120 let third = worktrees[2].read(cx);
7121
7122 // check they are now in the right order
7123 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7124 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7125 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7126 });
7127
7128 // move the first worktree to after the third
7129 // [a, b, c] -> [b, c, a]
7130 project
7131 .update(cx, |project, cx| {
7132 let first = worktree_a.read(cx);
7133 let third = worktree_c.read(cx);
7134 project.move_worktree(first.id(), third.id(), cx)
7135 })
7136 .expect("moving first after third");
7137
7138 // check the state after moving
7139 project.update(cx, |project, cx| {
7140 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7141 assert_eq!(worktrees.len(), 3);
7142
7143 let first = worktrees[0].read(cx);
7144 let second = worktrees[1].read(cx);
7145 let third = worktrees[2].read(cx);
7146
7147 // check they are now in the right order
7148 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7149 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7150 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7151 });
7152
7153 // move the third worktree to before the first
7154 // [b, c, a] -> [a, b, c]
7155 project
7156 .update(cx, |project, cx| {
7157 let third = worktree_a.read(cx);
7158 let first = worktree_b.read(cx);
7159 project.move_worktree(third.id(), first.id(), cx)
7160 })
7161 .expect("moving third before first");
7162
7163 // check the state after moving
7164 project.update(cx, |project, cx| {
7165 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7166 assert_eq!(worktrees.len(), 3);
7167
7168 let first = worktrees[0].read(cx);
7169 let second = worktrees[1].read(cx);
7170 let third = worktrees[2].read(cx);
7171
7172 // check they are now in the right order
7173 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7174 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7175 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7176 });
7177}
7178
// Verifies that an unstaged diff (working-copy buffer vs. git index) reports
// the correct hunks, and that it recomputes them when the index contents
// change on the (fake) filesystem.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The version recorded in the git index.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The working-copy version: adds a comment line and changes the println.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Let the diff finish loading its base text, then check the two expected
    // hunks: an added comment line and a modified println line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it now matches the working copy except for the
    // println body (the comment line is staged, the body change is not).
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index change is picked up, only a single added hunk remains.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7272
// Verifies an uncommitted diff (buffer vs. HEAD) for two buffers: one with
// a modification, and one whose file is deleted in the working copy. Also
// checks that the hunks' secondary (staged/unstaged) statuses track changes
// to HEAD and to the index.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index both also contain "deletion.rs", which does not
    // exist in the working tree — i.e. an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line is in the index but not HEAD, so it still has a
    // secondary (unstaged) hunk; the println change is fully staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a single deleted hunk; it still has a
    // secondary hunk because the deletion isn't staged yet.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7456
// End-to-end test of staging hunks through an uncommitted diff. Staging is
// optimistic: a pending status is shown immediately, then either confirmed
// when the index write completes or rolled back if the write fails. Also
// asserts the exact sequence of diff events emitted along the way.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index both contain this text...
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // ...while the working copy deletes "zero" and modifies "two" and
    // "four", producing three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so the exact sequence can be asserted.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write has not completed yet, so the staged hunk is only
        // marked as pending removal of its secondary hunk.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The hunk optimistically shows as pending even though the index
        // write is going to fail.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7802
// Verifies that staging stays consistent when filesystem events for index
// writes arrive late: hunks staged while earlier FS events are still
// buffered must not have their pending state clobbered.
// NOTE(review): the fixed seeds presumably reproduce a specific historical
// event-ordering failure — keep them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index contents...
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // ...and the working copy: "zero" deleted, "two" and "four" modified,
    // yielding three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Only optimistic state so far — the FS event is buffered.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both the first and second hunk are now pending.
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7996
7997#[gpui::test(iterations = 25)]
7998async fn test_staging_random_hunks(
7999 mut rng: StdRng,
8000 _executor: BackgroundExecutor,
8001 cx: &mut gpui::TestAppContext,
8002) {
8003 let operations = env::var("OPERATIONS")
8004 .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
8005 .unwrap_or(20);
8006
8007 use DiffHunkSecondaryStatus::*;
8008 init_test(cx);
8009
8010 let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
8011 let index_text = committed_text.clone();
8012 let buffer_text = (0..30)
8013 .map(|i| match i % 5 {
8014 0 => format!("line {i} (modified)\n"),
8015 _ => format!("line {i}\n"),
8016 })
8017 .collect::<String>();
8018
8019 let fs = FakeFs::new(cx.background_executor.clone());
8020 fs.insert_tree(
8021 path!("/dir"),
8022 json!({
8023 ".git": {},
8024 "file.txt": buffer_text.clone()
8025 }),
8026 )
8027 .await;
8028 fs.set_head_for_repo(
8029 path!("/dir/.git").as_ref(),
8030 &[("file.txt", committed_text.clone())],
8031 "deadbeef",
8032 );
8033 fs.set_index_for_repo(
8034 path!("/dir/.git").as_ref(),
8035 &[("file.txt", index_text.clone())],
8036 );
8037 let repo = fs
8038 .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
8039 .unwrap();
8040
8041 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
8042 let buffer = project
8043 .update(cx, |project, cx| {
8044 project.open_local_buffer(path!("/dir/file.txt"), cx)
8045 })
8046 .await
8047 .unwrap();
8048 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
8049 let uncommitted_diff = project
8050 .update(cx, |project, cx| {
8051 project.open_uncommitted_diff(buffer.clone(), cx)
8052 })
8053 .await
8054 .unwrap();
8055
8056 let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
8057 diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
8058 });
8059 assert_eq!(hunks.len(), 6);
8060
8061 for _i in 0..operations {
8062 let hunk_ix = rng.random_range(0..hunks.len());
8063 let hunk = &mut hunks[hunk_ix];
8064 let row = hunk.range.start.row;
8065
8066 if hunk.status().has_secondary_hunk() {
8067 log::info!("staging hunk at {row}");
8068 uncommitted_diff.update(cx, |diff, cx| {
8069 diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
8070 });
8071 hunk.secondary_status = SecondaryHunkRemovalPending;
8072 } else {
8073 log::info!("unstaging hunk at {row}");
8074 uncommitted_diff.update(cx, |diff, cx| {
8075 diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
8076 });
8077 hunk.secondary_status = SecondaryHunkAdditionPending;
8078 }
8079
8080 for _ in 0..rng.random_range(0..10) {
8081 log::info!("yielding");
8082 cx.executor().simulate_random_delay().await;
8083 }
8084 }
8085
8086 cx.executor().run_until_parked();
8087
8088 for hunk in &mut hunks {
8089 if hunk.secondary_status == SecondaryHunkRemovalPending {
8090 hunk.secondary_status = NoSecondaryHunk;
8091 } else if hunk.secondary_status == SecondaryHunkAdditionPending {
8092 hunk.secondary_status = HasSecondaryHunk;
8093 }
8094 }
8095
8096 log::info!(
8097 "index text:\n{}",
8098 repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
8099 .await
8100 .unwrap()
8101 );
8102
8103 uncommitted_diff.update(cx, |diff, cx| {
8104 let expected_hunks = hunks
8105 .iter()
8106 .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
8107 .collect::<Vec<_>>();
8108 let actual_hunks = diff
8109 .snapshot(cx)
8110 .hunks(&snapshot)
8111 .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
8112 .collect::<Vec<_>>();
8113 assert_eq!(actual_hunks, expected_hunks);
8114 });
8115}
8116
8117#[gpui::test]
8118async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8119 init_test(cx);
8120
8121 let committed_contents = r#"
8122 fn main() {
8123 println!("hello from HEAD");
8124 }
8125 "#
8126 .unindent();
8127 let file_contents = r#"
8128 fn main() {
8129 println!("hello from the working copy");
8130 }
8131 "#
8132 .unindent();
8133
8134 let fs = FakeFs::new(cx.background_executor.clone());
8135 fs.insert_tree(
8136 "/dir",
8137 json!({
8138 ".git": {},
8139 "src": {
8140 "main.rs": file_contents,
8141 }
8142 }),
8143 )
8144 .await;
8145
8146 fs.set_head_for_repo(
8147 Path::new("/dir/.git"),
8148 &[("src/main.rs", committed_contents.clone())],
8149 "deadbeef",
8150 );
8151 fs.set_index_for_repo(
8152 Path::new("/dir/.git"),
8153 &[("src/main.rs", committed_contents.clone())],
8154 );
8155
8156 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8157
8158 let buffer = project
8159 .update(cx, |project, cx| {
8160 project.open_local_buffer("/dir/src/main.rs", cx)
8161 })
8162 .await
8163 .unwrap();
8164 let uncommitted_diff = project
8165 .update(cx, |project, cx| {
8166 project.open_uncommitted_diff(buffer.clone(), cx)
8167 })
8168 .await
8169 .unwrap();
8170
8171 cx.run_until_parked();
8172 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8173 let snapshot = buffer.read(cx).snapshot();
8174 assert_hunks(
8175 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8176 &snapshot,
8177 &uncommitted_diff.base_text_string(cx).unwrap(),
8178 &[(
8179 1..2,
8180 " println!(\"hello from HEAD\");\n",
8181 " println!(\"hello from the working copy\");\n",
8182 DiffHunkStatus {
8183 kind: DiffHunkStatusKind::Modified,
8184 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8185 },
8186 )],
8187 );
8188 });
8189}
8190
// TODO: Should we test this on Windows also?
//
// Regression test: staging a hunk must not rewrite the file's mode bits in
// the index. The file is committed with mode 0755; after staging a content
// change, `git diff --staged` must not report a mode change and the index
// must still record the entry as 100755.
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // Uses a real on-disk git repository, so blocking is expected.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    // Mark the file executable before the initial commit so the committed
    // index entry is recorded with mode 100755.
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    // Modify the working copy so there is a hunk to stage.
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // Inspect the real repository with the git CLI to confirm no mode change
    // was introduced by staging.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the index entry's mode directly via `git ls-files -s`.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8275
// Checks `GitStore::repository_and_path_for_project_path`: paths outside any
// repository map to `None`, paths inside map to the innermost repository (the
// nested `deps/dep1` repo shadows the outer `dir1` repo), and removing a
// `.git` directory drops the mapping for that repo's files.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (repo work dir, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer repository's `.git` directory should unmap its files.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8365
// A git repository rooted at the user's home directory is deliberately
// ignored when the worktree is a subfolder of home, but is honored when home
// itself is opened as the worktree root.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: open a subfolder of home — the home repo must NOT be picked up.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: open home itself — the repo at home IS recognized.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8423
// End-to-end check of `Repository::cached_status` against a real git
// repository: the initial scan, modifying a previously clean file, committing
// the staged state, and deleting tracked vs. untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git; blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the statuses announced above: delete d.txt, modify a.txt.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously unchanged file; it should now appear as modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit everything (and remove d.txt from the index), then delete both a
    // tracked file (a.txt) and an untracked one (b.txt).
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8553
// Currently ignored. Exercises post-processing of raw git statuses: nested
// repositories are excluded from the parent repository's status list, and a
// file deleted in the index but present on disk surfaces as a combined `DA`
// (index-deleted / worktree-added) status.
#[gpui::test]
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git; blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer `project` repository (not the nested `sub` one).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        );
    });
}
8617
8618#[track_caller]
8619/// We merge lhs into rhs.
8620fn merge_pending_ops_snapshots(
8621 source: Vec<pending_op::PendingOps>,
8622 mut target: Vec<pending_op::PendingOps>,
8623) -> Vec<pending_op::PendingOps> {
8624 for s_ops in source {
8625 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8626 if ops.repo_path == s_ops.repo_path {
8627 Some(idx)
8628 } else {
8629 None
8630 }
8631 }) {
8632 let t_ops = &mut target[idx];
8633 for s_op in s_ops.ops {
8634 if let Some(op_idx) = t_ops
8635 .ops
8636 .iter()
8637 .zip(0..)
8638 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8639 {
8640 let t_op = &mut t_ops.ops[op_idx];
8641 match (s_op.job_status, t_op.job_status) {
8642 (pending_op::JobStatus::Running, _) => {}
8643 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8644 (s_st, t_st) if s_st == t_st => {}
8645 _ => unreachable!(),
8646 }
8647 } else {
8648 t_ops.ops.push(s_op);
8649 }
8650 }
8651 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8652 } else {
8653 target.push(s_ops);
8654 }
8655 }
8656 target
8657}
8658
// Exercises pending-op bookkeeping for repeated stage/unstage requests on a
// single untracked file: each request appears as a `Running` op immediately,
// transitions to `Finished` once its task resolves, and the accumulated event
// stream records every op in order.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every `PendingOpsChanged` event into one merged snapshot so the
    // full op history can be asserted on at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Op ids are expected to be assigned sequentially, starting at 1.
    let mut id = 1u16;

    // Stages (or unstages) `path` and asserts the op's lifecycle: `Running`
    // right after the request, `Finished` after the task completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            // The op is visible (and `Running`) as soon as the request is made.
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        // Once the task has resolved, the same op must be `Finished`.
        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event stream should record all five ops, alternating
    // staged/unstaged, all finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The final repository state reflects the last op: the file is staged.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8819
// When two stage requests for the same path are issued back to back, the
// first (still-running) op is superseded and recorded as `Skipped`, while the
// second completes as `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every `PendingOpsChanged` event into one merged snapshot so the
    // full op history can be asserted on at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Fire the first stage request and detach it without awaiting...
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // ...then immediately issue a second one and wait for it to complete.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (`Skipped`); op 2 actually ran (`Finished`).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged regardless of which request did the work.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8925
// Pending-op bookkeeping for `stage_all`/`unstage_all`: each affected path
// gets its own op sequence, and a redundant request (stage_all after a.txt is
// already staged) does not create an extra op for that path.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every `PendingOpsChanged` event into one merged snapshot so the
    // full op history can be asserted on at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: stage_all added no op (already staged), so only the explicit
    // stage (1) and the unstage_all (2) are recorded.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by stage_all (1), unstaged by unstage_all (2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9054
// Opens a worktree rooted deep inside a repository and verifies that the
// repository is still discovered at its real root (an ancestor of the
// worktree root) and that statuses for paths under the worktree resolve —
// both when present and after they are cleared.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files under the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the repo's statuses should propagate to the repository entity.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
9134
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
//
// Disabled via `#[cfg(any())]`. Simulates a conflicted cherry-pick in a real
// repository and checks that `Repository::merge_conflicts` reflects the
// conflict while CHERRY_PICK_HEAD exists, and clears once it is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem + real git; blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a divergent commit on another branch, then cherry-pick it onto
    // main where the same line was changed differently, producing a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflict should now be reflected in the repository entity.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9217
// Changing `.gitignore` re-evaluates ignored state: a file that stops being
// ignored can then carry a git status, while a newly-ignored file loses its
// status reporting.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored state has flipped: a.xml is now ignored, b.txt shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9285
9286// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory that some program already has open.
// This is a limitation of Windows.
9290// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
9291// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Renaming a repository's work directory on disk should update the
/// repository's `work_directory_abs_path` while preserving the statuses
/// (modified/untracked) that were observed before the rename.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses the real filesystem, so blocking IO must be allowed.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a", then modify it on disk; "b" is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Sanity-check the initial path and statuses before renaming.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the entire work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository now points at the new path, and the previously-observed
    // statuses carry over unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9367
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory that some program already has open. This is a
// limitation of Windows. See:
9371// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
9372// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// End-to-end check of git status tracking against a real repository:
/// untracked files, worktree modifications, commits, resets, stashes,
/// index removal, `.gitignore` edits, and directory creation/renames must
/// all be reflected in `Repository::status_for_path`.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem is used below, so blocking IO must be allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both start untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files report no status at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        // b.txt was removed from the index, so it reverts to untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files/directories and extend the ignore rules to cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a nested directory containing a new untracked file.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the file's status must follow the new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
9592
/// Changes inside ignored directories (e.g. a build tool writing temp files
/// under `target/`) must not produce `RepositoryUpdated` events, and project
/// entry updates should only be emitted for paths whose parent is tracked.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem is used below, so blocking IO must be allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Record repository update events and worktree entry updates so we can
    // assert on exactly what the scanner emitted.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The sentinel file is test plumbing, not a real change.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Loading a file inside the ignored dir forces those entries to be scanned.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Emulate a build tool churning inside the ignored directory: create a
    // nested dir, write a temp file into it, then remove the whole dir.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
    No updates for more nested directories should happen as those are ignored",
    );
}
9754
9755// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
9756// to different timings/ordering of events.
9757#[ignore]
9758#[gpui::test]
9759async fn test_odd_events_for_ignored_dirs(
9760 executor: BackgroundExecutor,
9761 cx: &mut gpui::TestAppContext,
9762) {
9763 init_test(cx);
9764 let fs = FakeFs::new(executor);
9765 fs.insert_tree(
9766 path!("/root"),
9767 json!({
9768 ".git": {},
9769 ".gitignore": "**/target/",
9770 "src": {
9771 "main.rs": "fn main() {}",
9772 },
9773 "target": {
9774 "debug": {
9775 "foo.txt": "foo",
9776 "deps": {}
9777 }
9778 }
9779 }),
9780 )
9781 .await;
9782 fs.set_head_and_index_for_repo(
9783 path!("/root/.git").as_ref(),
9784 &[
9785 (".gitignore", "**/target/".into()),
9786 ("src/main.rs", "fn main() {}".into()),
9787 ],
9788 );
9789
9790 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9791 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9792 let project_events = Arc::new(Mutex::new(Vec::new()));
9793 project.update(cx, |project, cx| {
9794 let repository_updates = repository_updates.clone();
9795 cx.subscribe(project.git_store(), move |_, _, e, _| {
9796 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9797 repository_updates.lock().push(e.clone());
9798 }
9799 })
9800 .detach();
9801 let project_events = project_events.clone();
9802 cx.subscribe_self(move |_, e, _| {
9803 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9804 project_events.lock().extend(
9805 updates
9806 .iter()
9807 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9808 .filter(|(path, _)| path != "fs-event-sentinel"),
9809 );
9810 }
9811 })
9812 .detach();
9813 });
9814
9815 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9816 tree.update(cx, |tree, cx| {
9817 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9818 })
9819 .await
9820 .unwrap();
9821 tree.flush_fs_events(cx).await;
9822 project
9823 .update(cx, |project, cx| project.git_scans_complete(cx))
9824 .await;
9825 cx.run_until_parked();
9826 tree.update(cx, |tree, _| {
9827 assert_eq!(
9828 tree.entries(true, 0)
9829 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9830 .collect::<Vec<_>>(),
9831 vec![
9832 (rel_path(""), false),
9833 (rel_path(".gitignore"), false),
9834 (rel_path("src"), false),
9835 (rel_path("src/main.rs"), false),
9836 (rel_path("target"), true),
9837 (rel_path("target/debug"), true),
9838 (rel_path("target/debug/deps"), true),
9839 (rel_path("target/debug/foo.txt"), true),
9840 ]
9841 );
9842 });
9843
9844 assert_eq!(
9845 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9846 vec![
9847 RepositoryEvent::MergeHeadsChanged,
9848 RepositoryEvent::BranchChanged,
9849 RepositoryEvent::StatusesChanged,
9850 RepositoryEvent::StatusesChanged,
9851 ],
9852 "Initial worktree scan should produce a repo update event"
9853 );
9854 assert_eq!(
9855 project_events.lock().drain(..).collect::<Vec<_>>(),
9856 vec![
9857 ("target".to_string(), PathChange::Loaded),
9858 ("target/debug".to_string(), PathChange::Loaded),
9859 ("target/debug/deps".to_string(), PathChange::Loaded),
9860 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9861 ],
9862 "All non-ignored entries and all opened firs should be getting a project event",
9863 );
9864
9865 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9866 // This may happen multiple times during a single flycheck, but once is enough for testing.
9867 fs.emit_fs_event("/root/target/debug/deps", None);
9868 tree.flush_fs_events(cx).await;
9869 project
9870 .update(cx, |project, cx| project.git_scans_complete(cx))
9871 .await;
9872 cx.executor().run_until_parked();
9873
9874 assert_eq!(
9875 repository_updates
9876 .lock()
9877 .iter()
9878 .cloned()
9879 .collect::<Vec<_>>(),
9880 Vec::new(),
9881 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9882 );
9883 assert_eq!(
9884 project_events.lock().as_slice(),
9885 Vec::new(),
9886 "No further project events should happen, as only ignored dirs received FS events",
9887 );
9888}
9889
/// Repositories should only be tracked for visible worktrees: adding an
/// invisible single-file worktree (`b.txt`) whose ancestor contains a `.git`
/// must not introduce a new repository entry.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the nested repo; /root/dir1's repo is outside the project.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree for /root/dir1/b.txt (visible = false).
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: no repo for the invisible worktree.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9951
/// Rescanning after gitignore-relevant changes: files under an ignored dir
/// stay ignored, ancestor-ignored files report no status, and a file newly
/// added to the index shows up as `Added`. Also verifies `.git` itself is
/// treated as ignored.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so the test observes every entry.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    // The worktree root is /root/tree; /root/.gitignore is an ancestor rule.
    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be loaded so they can be asserted on.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a tracked file and stage it, plus one ancestor-ignored file and
    // one file inside the ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged file is Added; the others get no status.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10092
/// Linked git worktrees (`.git` file pointing into `.git/worktrees/...`) and
/// submodules (`.git` file pointing into `.git/modules/...`) must each be
/// discovered as separate repositories, and git state changes in either must
/// refresh statuses for buffers they contain.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three work directories are discovered: the main repo, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer /project repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait until the repository has processed all pending work.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10248
/// Two project worktrees that live under the same git work directory must be
/// deduplicated into a single repository entry.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open two sibling directories of the same repository as separate
    // project worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository, rooted at the shared parent, should be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10295
/// Saving a buffer under a new path (`save_buffer_as`) should re-resolve its
/// diff bases: the unstaged diff must switch to the new file's staged text,
/// and the uncommitted diff to the new file's committed text.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each file at each git layer, so that assertions
    // can tell exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the whole buffer so there is a real diff against any base.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff, opened after the rename, must also use file_2's
    // committed text as its base.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10409
10410async fn search(
10411 project: &Entity<Project>,
10412 query: SearchQuery,
10413 cx: &mut gpui::TestAppContext,
10414) -> Result<HashMap<String, Vec<Range<usize>>>> {
10415 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10416 let mut results = HashMap::default();
10417 while let Ok(search_result) = search_rx.rx.recv().await {
10418 match search_result {
10419 SearchResult::Buffer { buffer, ranges } => {
10420 results.entry(buffer).or_insert(ranges);
10421 }
10422 SearchResult::LimitReached => {}
10423 }
10424 }
10425 Ok(results
10426 .into_iter()
10427 .map(|(buffer, ranges)| {
10428 buffer.update(cx, |buffer, cx| {
10429 let path = buffer
10430 .file()
10431 .unwrap()
10432 .full_path(cx)
10433 .to_string_lossy()
10434 .to_string();
10435 let ranges = ranges
10436 .into_iter()
10437 .map(|range| range.to_offset(buffer))
10438 .collect::<Vec<_>>();
10439 (path, ranges)
10440 })
10441 })
10442 .collect())
10443}
10444
10445pub fn init_test(cx: &mut gpui::TestAppContext) {
10446 zlog::init_test();
10447
10448 cx.update(|cx| {
10449 let settings_store = SettingsStore::test(cx);
10450 cx.set_global(settings_store);
10451 release_channel::init(semver::Version::new(0, 0, 0), cx);
10452 });
10453}
10454
10455fn json_lang() -> Arc<Language> {
10456 Arc::new(Language::new(
10457 LanguageConfig {
10458 name: "JSON".into(),
10459 matcher: LanguageMatcher {
10460 path_suffixes: vec!["json".to_string()],
10461 ..Default::default()
10462 },
10463 ..Default::default()
10464 },
10465 None,
10466 ))
10467}
10468
10469fn js_lang() -> Arc<Language> {
10470 Arc::new(Language::new(
10471 LanguageConfig {
10472 name: "JavaScript".into(),
10473 matcher: LanguageMatcher {
10474 path_suffixes: vec!["js".to_string()],
10475 ..Default::default()
10476 },
10477 ..Default::default()
10478 },
10479 None,
10480 ))
10481}
10482
/// Builds a fake "Python" language (no parser) whose toolchain lister reports
/// a virtual environment for every `.venv` directory found in the ancestors of
/// the queried subroot path, using the provided fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister backed by the fake filesystem; used to exercise venv
    // discovery without a real Python installation.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report one toolchain per `.venv` directory present in any
            // ancestor of `subroot_relative_path` (including itself).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unsupported in this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchains.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
10552
10553fn typescript_lang() -> Arc<Language> {
10554 Arc::new(Language::new(
10555 LanguageConfig {
10556 name: "TypeScript".into(),
10557 matcher: LanguageMatcher {
10558 path_suffixes: vec!["ts".to_string()],
10559 ..Default::default()
10560 },
10561 ..Default::default()
10562 },
10563 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10564 ))
10565}
10566
10567fn tsx_lang() -> Arc<Language> {
10568 Arc::new(Language::new(
10569 LanguageConfig {
10570 name: "tsx".into(),
10571 matcher: LanguageMatcher {
10572 path_suffixes: vec!["tsx".to_string()],
10573 ..Default::default()
10574 },
10575 ..Default::default()
10576 },
10577 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10578 ))
10579}
10580
10581fn get_all_tasks(
10582 project: &Entity<Project>,
10583 task_contexts: Arc<TaskContexts>,
10584 cx: &mut App,
10585) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10586 let new_tasks = project.update(cx, |project, cx| {
10587 project.task_store.update(cx, |task_store, cx| {
10588 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10589 this.used_and_current_resolved_tasks(task_contexts, cx)
10590 })
10591 })
10592 });
10593
10594 cx.background_spawn(async move {
10595 let (mut old, new) = new_tasks.await;
10596 old.extend(new);
10597 old
10598 })
10599}
10600
10601#[track_caller]
10602fn assert_entry_git_state(
10603 tree: &Worktree,
10604 repository: &Repository,
10605 path: &str,
10606 index_status: Option<StatusCode>,
10607 is_ignored: bool,
10608) {
10609 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10610 let entry = tree
10611 .entry_for_path(&rel_path(path))
10612 .unwrap_or_else(|| panic!("entry {path} not found"));
10613 let status = repository
10614 .status_for_path(&repo_path(path))
10615 .map(|entry| entry.status);
10616 let expected = index_status.map(|index_status| {
10617 TrackedStatus {
10618 index_status,
10619 worktree_status: StatusCode::Unmodified,
10620 }
10621 .into()
10622 });
10623 assert_eq!(
10624 status, expected,
10625 "expected {path} to have git status: {expected:?}"
10626 );
10627 assert_eq!(
10628 entry.is_ignored, is_ignored,
10629 "expected {path} to have is_ignored: {is_ignored}"
10630 );
10631}
10632
10633#[track_caller]
10634fn git_init(path: &Path) -> git2::Repository {
10635 let mut init_opts = RepositoryInitOptions::new();
10636 init_opts.initial_head("main");
10637 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10638}
10639
10640#[track_caller]
10641fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10642 let path = path.as_ref();
10643 let mut index = repo.index().expect("Failed to get index");
10644 index.add_path(path).expect("Failed to add file");
10645 index.write().expect("Failed to write index");
10646}
10647
10648#[track_caller]
10649fn git_remove_index(path: &Path, repo: &git2::Repository) {
10650 let mut index = repo.index().expect("Failed to get index");
10651 index.remove_path(path).expect("Failed to add file");
10652 index.write().expect("Failed to write index");
10653}
10654
10655#[track_caller]
10656fn git_commit(msg: &'static str, repo: &git2::Repository) {
10657 use git2::Signature;
10658
10659 let signature = Signature::now("test", "test@zed.dev").unwrap();
10660 let oid = repo.index().unwrap().write_tree().unwrap();
10661 let tree = repo.find_tree(oid).unwrap();
10662 if let Ok(head) = repo.head() {
10663 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10664
10665 let parent_commit = parent_obj.as_commit().unwrap();
10666
10667 repo.commit(
10668 Some("HEAD"),
10669 &signature,
10670 &signature,
10671 msg,
10672 &tree,
10673 &[parent_commit],
10674 )
10675 .expect("Failed to commit with parent");
10676 } else {
10677 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10678 .expect("Failed to commit");
10679 }
10680}
10681
/// Cherry-picks `commit` onto the working tree with default options.
/// (Currently compiled out via `#[cfg(any())]`.)
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None)
        .expect("Failed to cherrypick");
}
10687
10688#[track_caller]
10689fn git_stash(repo: &mut git2::Repository) {
10690 use git2::Signature;
10691
10692 let signature = Signature::now("test", "test@zed.dev").unwrap();
10693 repo.stash_save(&signature, "N/A", None)
10694 .expect("Failed to stash");
10695}
10696
10697#[track_caller]
10698fn git_reset(offset: usize, repo: &git2::Repository) {
10699 let head = repo.head().expect("Couldn't get repo head");
10700 let object = head.peel(git2::ObjectType::Commit).unwrap();
10701 let commit = object.as_commit().unwrap();
10702 let new_head = commit
10703 .parents()
10704 .inspect(|parnet| {
10705 parnet.message();
10706 })
10707 .nth(offset)
10708 .expect("Not enough history");
10709 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10710 .expect("Could not reset");
10711}
10712
/// Creates branch `name` pointing at the current HEAD commit (no force).
/// (Currently compiled out via `#[cfg(any())]`.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-paste error: this creates a branch, it doesn't commit, so the
    // failure message now says so.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10723
/// Points HEAD at the ref `name` and syncs the working tree to it.
/// (Currently compiled out via `#[cfg(any())]`.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None)
        .expect("Failed to check out head");
}
10730
/// Returns the repository's status entries as a path → status map.
/// (Currently compiled out via `#[cfg(any())]`.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
10740
10741#[gpui::test]
10742async fn test_find_project_path_abs(
10743 background_executor: BackgroundExecutor,
10744 cx: &mut gpui::TestAppContext,
10745) {
10746 // find_project_path should work with absolute paths
10747 init_test(cx);
10748
10749 let fs = FakeFs::new(background_executor);
10750 fs.insert_tree(
10751 path!("/root"),
10752 json!({
10753 "project1": {
10754 "file1.txt": "content1",
10755 "subdir": {
10756 "file2.txt": "content2"
10757 }
10758 },
10759 "project2": {
10760 "file3.txt": "content3"
10761 }
10762 }),
10763 )
10764 .await;
10765
10766 let project = Project::test(
10767 fs.clone(),
10768 [
10769 path!("/root/project1").as_ref(),
10770 path!("/root/project2").as_ref(),
10771 ],
10772 cx,
10773 )
10774 .await;
10775
10776 // Make sure the worktrees are fully initialized
10777 project
10778 .update(cx, |project, cx| project.git_scans_complete(cx))
10779 .await;
10780 cx.run_until_parked();
10781
10782 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
10783 project.read_with(cx, |project, cx| {
10784 let worktrees: Vec<_> = project.worktrees(cx).collect();
10785 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
10786 let id1 = worktrees[0].read(cx).id();
10787 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
10788 let id2 = worktrees[1].read(cx).id();
10789 (abs_path1, id1, abs_path2, id2)
10790 });
10791
10792 project.update(cx, |project, cx| {
10793 let abs_path = project1_abs_path.join("file1.txt");
10794 let found_path = project.find_project_path(abs_path, cx).unwrap();
10795 assert_eq!(found_path.worktree_id, project1_id);
10796 assert_eq!(&*found_path.path, rel_path("file1.txt"));
10797
10798 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
10799 let found_path = project.find_project_path(abs_path, cx).unwrap();
10800 assert_eq!(found_path.worktree_id, project1_id);
10801 assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
10802
10803 let abs_path = project2_abs_path.join("file3.txt");
10804 let found_path = project.find_project_path(abs_path, cx).unwrap();
10805 assert_eq!(found_path.worktree_id, project2_id);
10806 assert_eq!(&*found_path.path, rel_path("file3.txt"));
10807
10808 let abs_path = project1_abs_path.join("nonexistent.txt");
10809 let found_path = project.find_project_path(abs_path, cx);
10810 assert!(
10811 found_path.is_some(),
10812 "Should find project path for nonexistent file in worktree"
10813 );
10814
10815 // Test with an absolute path outside any worktree
10816 let abs_path = Path::new("/some/other/path");
10817 let found_path = project.find_project_path(abs_path, cx);
10818 assert!(
10819 found_path.is_none(),
10820 "Should not find project path for path outside any worktree"
10821 );
10822 });
10823}
10824
#[gpui::test]
// Verifies that removing worktrees keeps the git store's repository set and the
// active repository consistent: a repository survives as long as some worktree
// still covers it, and the active repository falls back (and finally clears)
// as its worktrees are removed.
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Three worktrees: `/root/a`, `/root/b/script` (a subdirectory of repo b),
    // and `/root/b` itself.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Only two repositories: `a` and `b` (the `script` worktree lives inside `b`).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the `script` worktree must not drop repo `b`, since the
    // `/root/b` worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing the worktree of the active repository should make the remaining
    // repository (`b`) active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the last worktree leaves no active repository at all.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
10937
#[gpui::test]
// Verifies the optimistic-staging UI flow: when a file is staged, its diff
// hunks are shown as "removal pending" before the git operation completes,
// then settle to fully staged, and disappear entirely after a commit.
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index both hold the committed contents, so the on-disk edit
    // ("two" -> "TWO") is an unstaged modification.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        match hunk.secondary_status {
            // The staging job hasn't marked the hunk yet; keep ticking.
            HasSecondaryHunk => {}
            // The optimistic "staging in progress" state we're waiting for.
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}
11082
11083#[gpui::test]
11084async fn test_read_only_files_setting(cx: &mut gpui::TestAppContext) {
11085 init_test(cx);
11086
11087 // Configure read_only_files setting
11088 cx.update(|cx| {
11089 cx.update_global::<SettingsStore, _>(|store, cx| {
11090 store.update_user_settings(cx, |settings| {
11091 settings.project.worktree.read_only_files = Some(vec![
11092 "**/generated/**".to_string(),
11093 "**/*.gen.rs".to_string(),
11094 ]);
11095 });
11096 });
11097 });
11098
11099 let fs = FakeFs::new(cx.background_executor.clone());
11100 fs.insert_tree(
11101 path!("/root"),
11102 json!({
11103 "src": {
11104 "main.rs": "fn main() {}",
11105 "types.gen.rs": "// Generated file",
11106 },
11107 "generated": {
11108 "schema.rs": "// Auto-generated schema",
11109 }
11110 }),
11111 )
11112 .await;
11113
11114 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11115
11116 // Open a regular file - should be read-write
11117 let regular_buffer = project
11118 .update(cx, |project, cx| {
11119 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11120 })
11121 .await
11122 .unwrap();
11123
11124 regular_buffer.read_with(cx, |buffer, _| {
11125 assert!(!buffer.read_only(), "Regular file should not be read-only");
11126 });
11127
11128 // Open a file matching *.gen.rs pattern - should be read-only
11129 let gen_buffer = project
11130 .update(cx, |project, cx| {
11131 project.open_local_buffer(path!("/root/src/types.gen.rs"), cx)
11132 })
11133 .await
11134 .unwrap();
11135
11136 gen_buffer.read_with(cx, |buffer, _| {
11137 assert!(
11138 buffer.read_only(),
11139 "File matching *.gen.rs pattern should be read-only"
11140 );
11141 });
11142
11143 // Open a file in generated directory - should be read-only
11144 let generated_buffer = project
11145 .update(cx, |project, cx| {
11146 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11147 })
11148 .await
11149 .unwrap();
11150
11151 generated_buffer.read_with(cx, |buffer, _| {
11152 assert!(
11153 buffer.read_only(),
11154 "File in generated directory should be read-only"
11155 );
11156 });
11157}
11158
11159#[gpui::test]
11160async fn test_read_only_files_empty_setting(cx: &mut gpui::TestAppContext) {
11161 init_test(cx);
11162
11163 // Explicitly set read_only_files to empty (default behavior)
11164 cx.update(|cx| {
11165 cx.update_global::<SettingsStore, _>(|store, cx| {
11166 store.update_user_settings(cx, |settings| {
11167 settings.project.worktree.read_only_files = Some(vec![]);
11168 });
11169 });
11170 });
11171
11172 let fs = FakeFs::new(cx.background_executor.clone());
11173 fs.insert_tree(
11174 path!("/root"),
11175 json!({
11176 "src": {
11177 "main.rs": "fn main() {}",
11178 },
11179 "generated": {
11180 "schema.rs": "// Auto-generated schema",
11181 }
11182 }),
11183 )
11184 .await;
11185
11186 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11187
11188 // All files should be read-write when read_only_files is empty
11189 let main_buffer = project
11190 .update(cx, |project, cx| {
11191 project.open_local_buffer(path!("/root/src/main.rs"), cx)
11192 })
11193 .await
11194 .unwrap();
11195
11196 main_buffer.read_with(cx, |buffer, _| {
11197 assert!(
11198 !buffer.read_only(),
11199 "Files should not be read-only when read_only_files is empty"
11200 );
11201 });
11202
11203 let generated_buffer = project
11204 .update(cx, |project, cx| {
11205 project.open_local_buffer(path!("/root/generated/schema.rs"), cx)
11206 })
11207 .await
11208 .unwrap();
11209
11210 generated_buffer.read_with(cx, |buffer, _| {
11211 assert!(
11212 !buffer.read_only(),
11213 "Generated files should not be read-only when read_only_files is empty"
11214 );
11215 });
11216}
11217
11218#[gpui::test]
11219async fn test_read_only_files_with_lock_files(cx: &mut gpui::TestAppContext) {
11220 init_test(cx);
11221
11222 // Configure to make lock files read-only
11223 cx.update(|cx| {
11224 cx.update_global::<SettingsStore, _>(|store, cx| {
11225 store.update_user_settings(cx, |settings| {
11226 settings.project.worktree.read_only_files = Some(vec![
11227 "**/*.lock".to_string(),
11228 "**/package-lock.json".to_string(),
11229 ]);
11230 });
11231 });
11232 });
11233
11234 let fs = FakeFs::new(cx.background_executor.clone());
11235 fs.insert_tree(
11236 path!("/root"),
11237 json!({
11238 "Cargo.lock": "# Lock file",
11239 "Cargo.toml": "[package]",
11240 "package-lock.json": "{}",
11241 "package.json": "{}",
11242 }),
11243 )
11244 .await;
11245
11246 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
11247
11248 // Cargo.lock should be read-only
11249 let cargo_lock = project
11250 .update(cx, |project, cx| {
11251 project.open_local_buffer(path!("/root/Cargo.lock"), cx)
11252 })
11253 .await
11254 .unwrap();
11255
11256 cargo_lock.read_with(cx, |buffer, _| {
11257 assert!(buffer.read_only(), "Cargo.lock should be read-only");
11258 });
11259
11260 // Cargo.toml should be read-write
11261 let cargo_toml = project
11262 .update(cx, |project, cx| {
11263 project.open_local_buffer(path!("/root/Cargo.toml"), cx)
11264 })
11265 .await
11266 .unwrap();
11267
11268 cargo_toml.read_with(cx, |buffer, _| {
11269 assert!(!buffer.read_only(), "Cargo.toml should not be read-only");
11270 });
11271
11272 // package-lock.json should be read-only
11273 let package_lock = project
11274 .update(cx, |project, cx| {
11275 project.open_local_buffer(path!("/root/package-lock.json"), cx)
11276 })
11277 .await
11278 .unwrap();
11279
11280 package_lock.read_with(cx, |buffer, _| {
11281 assert!(buffer.read_only(), "package-lock.json should be read-only");
11282 });
11283
11284 // package.json should be read-write
11285 let package_json = project
11286 .update(cx, |project, cx| {
11287 project.open_local_buffer(path!("/root/package.json"), cx)
11288 })
11289 .await
11290 .unwrap();
11291
11292 package_json.read_with(cx, |buffer, _| {
11293 assert!(!buffer.read_only(), "package.json should not be read-only");
11294 });
11295}