1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 rust_lang, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
98// NOTE:
99// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
100// we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
// Verifies `.editorconfig` support: editorconfig settings override
// `.zed/settings.json`, a nested `.editorconfig` overrides the root one,
// `tab_width` applies when `indent_size` is absent, and `max_line_length =
// off` falls back to the value from Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            // NOTE(review): the trailing comma in `max_line_length = off,`
            // looks accidental — presumably it still parses as a non-numeric
            // value and falls back (the assertions below pass either way);
            // confirm intent before "fixing" it.
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project can observe it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative
        // path, loading the language for the file's path first.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
248#[gpui::test]
249async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
250 init_test(cx);
251 cx.update(|cx| {
252 GitHostingProviderRegistry::default_global(cx);
253 git_hosting_providers::init(cx);
254 });
255
256 let fs = FakeFs::new(cx.executor());
257 let str_path = path!("/dir");
258 let path = Path::new(str_path);
259
260 fs.insert_tree(
261 path!("/dir"),
262 json!({
263 ".zed": {
264 "settings.json": r#"{
265 "git_hosting_providers": [
266 {
267 "provider": "gitlab",
268 "base_url": "https://google.com",
269 "name": "foo"
270 }
271 ]
272 }"#
273 },
274 }),
275 )
276 .await;
277
278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
279 let (_worktree, _) =
280 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
281 cx.executor().run_until_parked();
282
283 cx.update(|cx| {
284 let provider = GitHostingProviderRegistry::global(cx);
285 assert!(
286 provider
287 .list_hosting_providers()
288 .into_iter()
289 .any(|provider| provider.name() == "foo")
290 );
291 });
292
293 fs.atomic_write(
294 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
295 "{}".into(),
296 )
297 .await
298 .unwrap();
299
300 cx.run_until_parked();
301
302 cx.update(|cx| {
303 let provider = GitHostingProviderRegistry::global(cx);
304 assert!(
305 !provider
306 .list_hosting_providers()
307 .into_iter()
308 .any(|provider| provider.name() == "foo")
309 );
310 });
311}
312
// Verifies task discovery across nested `.zed` directories: per-directory
// settings apply to files beneath them, tasks from every `.zed/tasks.json`
// are surfaced, and scheduling a task or adding global tasks reorders the
// resolved list (recently used first, global tasks last).
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve all tasks against a context for the single worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings from each directory's `.zed/settings.json` apply to
            // files beneath that directory.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from both `.zed` directories are discovered; the nested
    // directory's task is listed first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the top-level task as recently scheduled, and add a task to the
    // global tasks.json file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first, and the global task (with
    // its env) is appended last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
// A task that references `$ZED_WORKTREE_ROOT` can only be resolved when some
// context supplies that variable: with no active worktree context nothing
// resolves, while a worktree context with `WorktreeRoot` set yields the task
// with the variable substituted into its command.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // An active item alone provides no worktree context, and therefore no
    // ZED_WORKTREE_ROOT value to substitute.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context carrying the WorktreeRoot variable, the task
    // resolves and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
// When two subprojects in one worktree share a language server, both initially
// attach to the same server instance (rooted at the first subproject); after a
// different toolchain is activated for the second subproject, a separate
// server instance (new server id) is started for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Roots Python subprojects at the nearest ancestor directory that
    // contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walks up from `path` (at most `depth` ancestors) and returns the
        // first directory that contains a `pyproject.toml`.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects, each with its own `.venv` and `pyproject.toml`.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py": "",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain is active until one is explicitly selected.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new_static("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b's root.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.running_language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let settings_json_contents = json!({
1216 "languages": {
1217 "Rust": {
1218 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1219 }
1220 },
1221 "lsp": {
1222 "my_fake_lsp": {
1223 "binary": {
1224 // file exists, so this is treated as a relative path
1225 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1226 }
1227 },
1228 "lsp_on_path": {
1229 "binary": {
1230 // file doesn't exist, so it will fall back on PATH env var
1231 "path": path!("lsp_on_path.exe").to_string(),
1232 }
1233 }
1234 },
1235 });
1236
1237 let fs = FakeFs::new(cx.executor());
1238 fs.insert_tree(
1239 path!("/the-root"),
1240 json!({
1241 ".zed": {
1242 "settings.json": settings_json_contents.to_string(),
1243 },
1244 ".relative_path": {
1245 "to": {
1246 "my_fake_lsp.exe": "",
1247 },
1248 },
1249 "src": {
1250 "main.rs": "",
1251 }
1252 }),
1253 )
1254 .await;
1255
1256 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1257 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1258 language_registry.add(rust_lang());
1259
1260 let mut my_fake_lsp = language_registry.register_fake_lsp(
1261 "Rust",
1262 FakeLspAdapter {
1263 name: "my_fake_lsp",
1264 ..Default::default()
1265 },
1266 );
1267 let mut lsp_on_path = language_registry.register_fake_lsp(
1268 "Rust",
1269 FakeLspAdapter {
1270 name: "lsp_on_path",
1271 ..Default::default()
1272 },
1273 );
1274
1275 cx.run_until_parked();
1276
1277 // Start the language server by opening a buffer with a compatible file extension.
1278 project
1279 .update(cx, |project, cx| {
1280 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1281 })
1282 .await
1283 .unwrap();
1284
1285 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1286 assert_eq!(
1287 lsp_path.to_string_lossy(),
1288 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1289 );
1290
1291 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1292 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1293}
1294
1295#[gpui::test]
1296async fn test_language_server_tilde_path(cx: &mut gpui::TestAppContext) {
1297 init_test(cx);
1298
1299 let settings_json_contents = json!({
1300 "languages": {
1301 "Rust": {
1302 "language_servers": ["tilde_lsp"]
1303 }
1304 },
1305 "lsp": {
1306 "tilde_lsp": {
1307 "binary": {
1308 "path": "~/.local/bin/rust-analyzer",
1309 }
1310 }
1311 },
1312 });
1313
1314 let fs = FakeFs::new(cx.executor());
1315 fs.insert_tree(
1316 path!("/root"),
1317 json!({
1318 ".zed": {
1319 "settings.json": settings_json_contents.to_string(),
1320 },
1321 "src": {
1322 "main.rs": "fn main() {}",
1323 }
1324 }),
1325 )
1326 .await;
1327
1328 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
1329 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1330 language_registry.add(rust_lang());
1331
1332 let mut tilde_lsp = language_registry.register_fake_lsp(
1333 "Rust",
1334 FakeLspAdapter {
1335 name: "tilde_lsp",
1336 ..Default::default()
1337 },
1338 );
1339 cx.run_until_parked();
1340
1341 project
1342 .update(cx, |project, cx| {
1343 project.open_local_buffer_with_lsp(path!("/root/src/main.rs"), cx)
1344 })
1345 .await
1346 .unwrap();
1347
1348 let lsp_path = tilde_lsp.next().await.unwrap().binary.path;
1349 let expected_path = paths::home_dir().join(".local/bin/rust-analyzer");
1350 assert_eq!(
1351 lsp_path, expected_path,
1352 "Tilde path should expand to home directory"
1353 );
1354}
1355
1356#[gpui::test]
1357async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1358 init_test(cx);
1359
1360 let fs = FakeFs::new(cx.executor());
1361 fs.insert_tree(
1362 path!("/the-root"),
1363 json!({
1364 ".gitignore": "target\n",
1365 "Cargo.lock": "",
1366 "src": {
1367 "a.rs": "",
1368 "b.rs": "",
1369 },
1370 "target": {
1371 "x": {
1372 "out": {
1373 "x.rs": ""
1374 }
1375 },
1376 "y": {
1377 "out": {
1378 "y.rs": "",
1379 }
1380 },
1381 "z": {
1382 "out": {
1383 "z.rs": ""
1384 }
1385 }
1386 }
1387 }),
1388 )
1389 .await;
1390 fs.insert_tree(
1391 path!("/the-registry"),
1392 json!({
1393 "dep1": {
1394 "src": {
1395 "dep1.rs": "",
1396 }
1397 },
1398 "dep2": {
1399 "src": {
1400 "dep2.rs": "",
1401 }
1402 },
1403 }),
1404 )
1405 .await;
1406 fs.insert_tree(
1407 path!("/the/stdlib"),
1408 json!({
1409 "LICENSE": "",
1410 "src": {
1411 "string.rs": "",
1412 }
1413 }),
1414 )
1415 .await;
1416
1417 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1418 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1419 (project.languages().clone(), project.lsp_store())
1420 });
1421 language_registry.add(rust_lang());
1422 let mut fake_servers = language_registry.register_fake_lsp(
1423 "Rust",
1424 FakeLspAdapter {
1425 name: "the-language-server",
1426 ..Default::default()
1427 },
1428 );
1429
1430 cx.executor().run_until_parked();
1431
1432 // Start the language server by opening a buffer with a compatible file extension.
1433 project
1434 .update(cx, |project, cx| {
1435 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1436 })
1437 .await
1438 .unwrap();
1439
1440 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1441 project.update(cx, |project, cx| {
1442 let worktree = project.worktrees(cx).next().unwrap();
1443 assert_eq!(
1444 worktree
1445 .read(cx)
1446 .snapshot()
1447 .entries(true, 0)
1448 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1449 .collect::<Vec<_>>(),
1450 &[
1451 ("", false),
1452 (".gitignore", false),
1453 ("Cargo.lock", false),
1454 ("src", false),
1455 ("src/a.rs", false),
1456 ("src/b.rs", false),
1457 ("target", true),
1458 ]
1459 );
1460 });
1461
1462 let prev_read_dir_count = fs.read_dir_call_count();
1463
1464 let fake_server = fake_servers.next().await.unwrap();
1465 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1466 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1467 id
1468 });
1469
1470 // Simulate jumping to a definition in a dependency outside of the worktree.
1471 let _out_of_worktree_buffer = project
1472 .update(cx, |project, cx| {
1473 project.open_local_buffer_via_lsp(
1474 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1475 server_id,
1476 cx,
1477 )
1478 })
1479 .await
1480 .unwrap();
1481
1482 // Keep track of the FS events reported to the language server.
1483 let file_changes = Arc::new(Mutex::new(Vec::new()));
1484 fake_server
1485 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1486 registrations: vec![lsp::Registration {
1487 id: Default::default(),
1488 method: "workspace/didChangeWatchedFiles".to_string(),
1489 register_options: serde_json::to_value(
1490 lsp::DidChangeWatchedFilesRegistrationOptions {
1491 watchers: vec![
1492 lsp::FileSystemWatcher {
1493 glob_pattern: lsp::GlobPattern::String(
1494 path!("/the-root/Cargo.toml").to_string(),
1495 ),
1496 kind: None,
1497 },
1498 lsp::FileSystemWatcher {
1499 glob_pattern: lsp::GlobPattern::String(
1500 path!("/the-root/src/*.{rs,c}").to_string(),
1501 ),
1502 kind: None,
1503 },
1504 lsp::FileSystemWatcher {
1505 glob_pattern: lsp::GlobPattern::String(
1506 path!("/the-root/target/y/**/*.rs").to_string(),
1507 ),
1508 kind: None,
1509 },
1510 lsp::FileSystemWatcher {
1511 glob_pattern: lsp::GlobPattern::String(
1512 path!("/the/stdlib/src/**/*.rs").to_string(),
1513 ),
1514 kind: None,
1515 },
1516 lsp::FileSystemWatcher {
1517 glob_pattern: lsp::GlobPattern::String(
1518 path!("**/Cargo.lock").to_string(),
1519 ),
1520 kind: None,
1521 },
1522 ],
1523 },
1524 )
1525 .ok(),
1526 }],
1527 })
1528 .await
1529 .into_response()
1530 .unwrap();
1531 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1532 let file_changes = file_changes.clone();
1533 move |params, _| {
1534 let mut file_changes = file_changes.lock();
1535 file_changes.extend(params.changes);
1536 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1537 }
1538 });
1539
1540 cx.executor().run_until_parked();
1541 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1542 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1543
1544 let mut new_watched_paths = fs.watched_paths();
1545 new_watched_paths.retain(|path| {
1546 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1547 });
1548 assert_eq!(
1549 &new_watched_paths,
1550 &[
1551 Path::new(path!("/the-root")),
1552 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1553 Path::new(path!("/the/stdlib/src"))
1554 ]
1555 );
1556
1557 // Now the language server has asked us to watch an ignored directory path,
1558 // so we recursively load it.
1559 project.update(cx, |project, cx| {
1560 let worktree = project.visible_worktrees(cx).next().unwrap();
1561 assert_eq!(
1562 worktree
1563 .read(cx)
1564 .snapshot()
1565 .entries(true, 0)
1566 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1567 .collect::<Vec<_>>(),
1568 &[
1569 ("", false),
1570 (".gitignore", false),
1571 ("Cargo.lock", false),
1572 ("src", false),
1573 ("src/a.rs", false),
1574 ("src/b.rs", false),
1575 ("target", true),
1576 ("target/x", true),
1577 ("target/y", true),
1578 ("target/y/out", true),
1579 ("target/y/out/y.rs", true),
1580 ("target/z", true),
1581 ]
1582 );
1583 });
1584
1585 // Perform some file system mutations, two of which match the watched patterns,
1586 // and one of which does not.
1587 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1588 .await
1589 .unwrap();
1590 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1591 .await
1592 .unwrap();
1593 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1594 .await
1595 .unwrap();
1596 fs.create_file(
1597 path!("/the-root/target/x/out/x2.rs").as_ref(),
1598 Default::default(),
1599 )
1600 .await
1601 .unwrap();
1602 fs.create_file(
1603 path!("/the-root/target/y/out/y2.rs").as_ref(),
1604 Default::default(),
1605 )
1606 .await
1607 .unwrap();
1608 fs.save(
1609 path!("/the-root/Cargo.lock").as_ref(),
1610 &"".into(),
1611 Default::default(),
1612 )
1613 .await
1614 .unwrap();
1615 fs.save(
1616 path!("/the-stdlib/LICENSE").as_ref(),
1617 &"".into(),
1618 Default::default(),
1619 )
1620 .await
1621 .unwrap();
1622 fs.save(
1623 path!("/the/stdlib/src/string.rs").as_ref(),
1624 &"".into(),
1625 Default::default(),
1626 )
1627 .await
1628 .unwrap();
1629
1630 // The language server receives events for the FS mutations that match its watch patterns.
1631 cx.executor().run_until_parked();
1632 assert_eq!(
1633 &*file_changes.lock(),
1634 &[
1635 lsp::FileEvent {
1636 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1637 typ: lsp::FileChangeType::CHANGED,
1638 },
1639 lsp::FileEvent {
1640 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1641 typ: lsp::FileChangeType::DELETED,
1642 },
1643 lsp::FileEvent {
1644 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1645 typ: lsp::FileChangeType::CREATED,
1646 },
1647 lsp::FileEvent {
1648 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1649 typ: lsp::FileChangeType::CREATED,
1650 },
1651 lsp::FileEvent {
1652 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1653 typ: lsp::FileChangeType::CHANGED,
1654 },
1655 ]
1656 );
1657}
1658
1659#[gpui::test]
1660async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1661 init_test(cx);
1662
1663 let fs = FakeFs::new(cx.executor());
1664 fs.insert_tree(
1665 path!("/dir"),
1666 json!({
1667 "a.rs": "let a = 1;",
1668 "b.rs": "let b = 2;"
1669 }),
1670 )
1671 .await;
1672
1673 let project = Project::test(
1674 fs,
1675 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1676 cx,
1677 )
1678 .await;
1679 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1680
1681 let buffer_a = project
1682 .update(cx, |project, cx| {
1683 project.open_local_buffer(path!("/dir/a.rs"), cx)
1684 })
1685 .await
1686 .unwrap();
1687 let buffer_b = project
1688 .update(cx, |project, cx| {
1689 project.open_local_buffer(path!("/dir/b.rs"), cx)
1690 })
1691 .await
1692 .unwrap();
1693
1694 lsp_store.update(cx, |lsp_store, cx| {
1695 lsp_store
1696 .update_diagnostics(
1697 LanguageServerId(0),
1698 lsp::PublishDiagnosticsParams {
1699 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1700 version: None,
1701 diagnostics: vec![lsp::Diagnostic {
1702 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1703 severity: Some(lsp::DiagnosticSeverity::ERROR),
1704 message: "error 1".to_string(),
1705 ..Default::default()
1706 }],
1707 },
1708 None,
1709 DiagnosticSourceKind::Pushed,
1710 &[],
1711 cx,
1712 )
1713 .unwrap();
1714 lsp_store
1715 .update_diagnostics(
1716 LanguageServerId(0),
1717 lsp::PublishDiagnosticsParams {
1718 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1719 version: None,
1720 diagnostics: vec![lsp::Diagnostic {
1721 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1722 severity: Some(DiagnosticSeverity::WARNING),
1723 message: "error 2".to_string(),
1724 ..Default::default()
1725 }],
1726 },
1727 None,
1728 DiagnosticSourceKind::Pushed,
1729 &[],
1730 cx,
1731 )
1732 .unwrap();
1733 });
1734
1735 buffer_a.update(cx, |buffer, _| {
1736 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1737 assert_eq!(
1738 chunks
1739 .iter()
1740 .map(|(s, d)| (s.as_str(), *d))
1741 .collect::<Vec<_>>(),
1742 &[
1743 ("let ", None),
1744 ("a", Some(DiagnosticSeverity::ERROR)),
1745 (" = 1;", None),
1746 ]
1747 );
1748 });
1749 buffer_b.update(cx, |buffer, _| {
1750 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1751 assert_eq!(
1752 chunks
1753 .iter()
1754 .map(|(s, d)| (s.as_str(), *d))
1755 .collect::<Vec<_>>(),
1756 &[
1757 ("let ", None),
1758 ("b", Some(DiagnosticSeverity::WARNING)),
1759 (" = 2;", None),
1760 ]
1761 );
1762 });
1763}
1764
1765#[gpui::test]
1766async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1767 init_test(cx);
1768
1769 let fs = FakeFs::new(cx.executor());
1770 fs.insert_tree(
1771 path!("/root"),
1772 json!({
1773 "dir": {
1774 ".git": {
1775 "HEAD": "ref: refs/heads/main",
1776 },
1777 ".gitignore": "b.rs",
1778 "a.rs": "let a = 1;",
1779 "b.rs": "let b = 2;",
1780 },
1781 "other.rs": "let b = c;"
1782 }),
1783 )
1784 .await;
1785
1786 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1787 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1788 let (worktree, _) = project
1789 .update(cx, |project, cx| {
1790 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1791 })
1792 .await
1793 .unwrap();
1794 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1795
1796 let (worktree, _) = project
1797 .update(cx, |project, cx| {
1798 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1799 })
1800 .await
1801 .unwrap();
1802 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1803
1804 let server_id = LanguageServerId(0);
1805 lsp_store.update(cx, |lsp_store, cx| {
1806 lsp_store
1807 .update_diagnostics(
1808 server_id,
1809 lsp::PublishDiagnosticsParams {
1810 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1811 version: None,
1812 diagnostics: vec![lsp::Diagnostic {
1813 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1814 severity: Some(lsp::DiagnosticSeverity::ERROR),
1815 message: "unused variable 'b'".to_string(),
1816 ..Default::default()
1817 }],
1818 },
1819 None,
1820 DiagnosticSourceKind::Pushed,
1821 &[],
1822 cx,
1823 )
1824 .unwrap();
1825 lsp_store
1826 .update_diagnostics(
1827 server_id,
1828 lsp::PublishDiagnosticsParams {
1829 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1830 version: None,
1831 diagnostics: vec![lsp::Diagnostic {
1832 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1833 severity: Some(lsp::DiagnosticSeverity::ERROR),
1834 message: "unknown variable 'c'".to_string(),
1835 ..Default::default()
1836 }],
1837 },
1838 None,
1839 DiagnosticSourceKind::Pushed,
1840 &[],
1841 cx,
1842 )
1843 .unwrap();
1844 });
1845
1846 let main_ignored_buffer = project
1847 .update(cx, |project, cx| {
1848 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1849 })
1850 .await
1851 .unwrap();
1852 main_ignored_buffer.update(cx, |buffer, _| {
1853 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1854 assert_eq!(
1855 chunks
1856 .iter()
1857 .map(|(s, d)| (s.as_str(), *d))
1858 .collect::<Vec<_>>(),
1859 &[
1860 ("let ", None),
1861 ("b", Some(DiagnosticSeverity::ERROR)),
1862 (" = 2;", None),
1863 ],
1864 "Gigitnored buffers should still get in-buffer diagnostics",
1865 );
1866 });
1867 let other_buffer = project
1868 .update(cx, |project, cx| {
1869 project.open_buffer((other_worktree_id, rel_path("")), cx)
1870 })
1871 .await
1872 .unwrap();
1873 other_buffer.update(cx, |buffer, _| {
1874 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1875 assert_eq!(
1876 chunks
1877 .iter()
1878 .map(|(s, d)| (s.as_str(), *d))
1879 .collect::<Vec<_>>(),
1880 &[
1881 ("let b = ", None),
1882 ("c", Some(DiagnosticSeverity::ERROR)),
1883 (";", None),
1884 ],
1885 "Buffers from hidden projects should still get in-buffer diagnostics"
1886 );
1887 });
1888
1889 project.update(cx, |project, cx| {
1890 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1891 assert_eq!(
1892 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1893 vec![(
1894 ProjectPath {
1895 worktree_id: main_worktree_id,
1896 path: rel_path("b.rs").into(),
1897 },
1898 server_id,
1899 DiagnosticSummary {
1900 error_count: 1,
1901 warning_count: 0,
1902 }
1903 )]
1904 );
1905 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1906 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1907 });
1908}
1909
// Exercises the project event stream around a "disk-based" diagnostics pass
// (e.g. cargo-check style): progress with the configured token brackets the
// pass with DiskBasedDiagnosticsStarted/Finished, DiagnosticsUpdated fires for
// published diagnostics, and republishing identical empty diagnostics does not
// emit a second update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token the fake server will use to mark its disk-based diagnostics work.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register the token so progress notifications with it are treated as
    // disk-based diagnostics activity.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Collect project events from this point on; the assertions below consume
    // them strictly in order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Progress with the disk-based token starts a diagnostics pass.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs produces a DiagnosticsUpdated event,
    // even though that buffer is not open yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the token's progress finishes the pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer now shows the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second identical empty publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
2045
// Restarts a language server while its disk-based diagnostics pass is still
// in progress, and verifies that the old server's unfinished progress does not
// leave the project stuck in a "diagnostics running" state: only the new
// server (id 1) is tracked, and its finish event clears the running set.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The restart surfaces as a Removed(0) followed by Added(1) event pair.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server appears in the "running diagnostics" set.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2145
// Verifies that restarting a language server clears the diagnostics it had
// published: both the in-buffer diagnostics and the project-level summary
// counts drop to zero after the restart.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // Confirm the diagnostic landed in the buffer and in the project summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    // Restart the server that published the diagnostic.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2226
2227#[gpui::test]
2228async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2229 init_test(cx);
2230
2231 let fs = FakeFs::new(cx.executor());
2232 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2233
2234 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2235 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2236
2237 language_registry.add(rust_lang());
2238 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2239
2240 let (buffer, _handle) = project
2241 .update(cx, |project, cx| {
2242 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2243 })
2244 .await
2245 .unwrap();
2246
2247 // Before restarting the server, report diagnostics with an unknown buffer version.
2248 let fake_server = fake_servers.next().await.unwrap();
2249 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2250 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2251 version: Some(10000),
2252 diagnostics: Vec::new(),
2253 });
2254 cx.executor().run_until_parked();
2255 project.update(cx, |project, cx| {
2256 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2257 });
2258
2259 let mut fake_server = fake_servers.next().await.unwrap();
2260 let notification = fake_server
2261 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2262 .await
2263 .text_document;
2264 assert_eq!(notification.version, 0);
2265}
2266
2267#[gpui::test]
2268async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2269 init_test(cx);
2270
2271 let progress_token = "the-progress-token";
2272
2273 let fs = FakeFs::new(cx.executor());
2274 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2275
2276 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2277
2278 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2279 language_registry.add(rust_lang());
2280 let mut fake_servers = language_registry.register_fake_lsp(
2281 "Rust",
2282 FakeLspAdapter {
2283 name: "the-language-server",
2284 disk_based_diagnostics_sources: vec!["disk".into()],
2285 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2286 ..Default::default()
2287 },
2288 );
2289
2290 let (buffer, _handle) = project
2291 .update(cx, |project, cx| {
2292 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2293 })
2294 .await
2295 .unwrap();
2296
2297 // Simulate diagnostics starting to update.
2298 let mut fake_server = fake_servers.next().await.unwrap();
2299 fake_server
2300 .start_progress_with(
2301 "another-token",
2302 lsp::WorkDoneProgressBegin {
2303 cancellable: Some(false),
2304 ..Default::default()
2305 },
2306 )
2307 .await;
2308 fake_server
2309 .start_progress_with(
2310 progress_token,
2311 lsp::WorkDoneProgressBegin {
2312 cancellable: Some(true),
2313 ..Default::default()
2314 },
2315 )
2316 .await;
2317 cx.executor().run_until_parked();
2318
2319 project.update(cx, |project, cx| {
2320 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2321 });
2322
2323 let cancel_notification = fake_server
2324 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2325 .await;
2326 assert_eq!(
2327 cancel_notification.token,
2328 NumberOrString::String(progress_token.into())
2329 );
2330}
2331
// Toggling the per-language `enable_language_server` setting should stop and
// start only the language servers for the affected language, leaving servers
// for other languages untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts that language's server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    // Each server is greeted with a didOpen for its own buffer only.
    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is asked to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance starts and re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2449
// Diagnostics published by a language server are anchored to the buffer
// version they were computed against, so they must be translated through any
// edits the user has made since — both edits made before the publish arrives
// and edits made afterwards.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (They were published against rows 0-2 but now appear at rows 2-4,
    // because the edit above inserted two blank lines at the top.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // Requesting a sub-range truncates chunks at the range boundaries.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The error is nested inside the warning's range; the narrower, more
        // severe diagnostic wins when styling overlapping chunks below.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics come back sorted by position regardless of the order in
        // which they were published.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2741
// Diagnostics whose LSP range is empty (start == end) should still be visibly
// highlighted by extending the range to a neighboring character.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Inject two zero-width diagnostics directly into the LSP store: one in
    // the middle of line 0 and one at the very end of line 1.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from(path!("/dir/a.rs")),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2819
2820#[gpui::test]
2821async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2822 init_test(cx);
2823
2824 let fs = FakeFs::new(cx.executor());
2825 fs.insert_tree(path!("/dir"), json!({ "a.rs": "one two three" }))
2826 .await;
2827
2828 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
2829 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2830
2831 lsp_store.update(cx, |lsp_store, cx| {
2832 lsp_store
2833 .update_diagnostic_entries(
2834 LanguageServerId(0),
2835 Path::new(path!("/dir/a.rs")).to_owned(),
2836 None,
2837 None,
2838 vec![DiagnosticEntry {
2839 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2840 diagnostic: Diagnostic {
2841 severity: DiagnosticSeverity::ERROR,
2842 is_primary: true,
2843 message: "syntax error a1".to_string(),
2844 source_kind: DiagnosticSourceKind::Pushed,
2845 ..Diagnostic::default()
2846 },
2847 }],
2848 cx,
2849 )
2850 .unwrap();
2851 lsp_store
2852 .update_diagnostic_entries(
2853 LanguageServerId(1),
2854 Path::new(path!("/dir/a.rs")).to_owned(),
2855 None,
2856 None,
2857 vec![DiagnosticEntry {
2858 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2859 diagnostic: Diagnostic {
2860 severity: DiagnosticSeverity::ERROR,
2861 is_primary: true,
2862 message: "syntax error b1".to_string(),
2863 source_kind: DiagnosticSourceKind::Pushed,
2864 ..Diagnostic::default()
2865 },
2866 }],
2867 cx,
2868 )
2869 .unwrap();
2870
2871 assert_eq!(
2872 lsp_store.diagnostic_summary(false, cx),
2873 DiagnosticSummary {
2874 error_count: 2,
2875 warning_count: 0,
2876 }
2877 );
2878 });
2879}
2880
// Edits that a language server computed against an older document version
// must be translated through the buffer edits made since that version before
// they are applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Remember the document version the server saw at open time; the edits
    // below will be declared against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The edit coordinates below refer to the *original* document version;
    // edits_from_lsp must map them forward through the comments inserted above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves both the user's comments and
    // the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
3035
// A large rewrite-the-whole-file style diff from the server should be
// minimized into the small set of edits the user actually sees.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four overlapping LSP edits above collapse into just two minimal
        // edits: the import change and the removal of the second use statement.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3146
3147#[gpui::test]
3148async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3149 cx: &mut gpui::TestAppContext,
3150) {
3151 init_test(cx);
3152
3153 let text = "Path()";
3154
3155 let fs = FakeFs::new(cx.executor());
3156 fs.insert_tree(
3157 path!("/dir"),
3158 json!({
3159 "a.rs": text
3160 }),
3161 )
3162 .await;
3163
3164 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3165 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3166 let buffer = project
3167 .update(cx, |project, cx| {
3168 project.open_local_buffer(path!("/dir/a.rs"), cx)
3169 })
3170 .await
3171 .unwrap();
3172
3173 // Simulate the language server sending us a pair of edits at the same location,
3174 // with an insertion following a replacement (which violates the LSP spec).
3175 let edits = lsp_store
3176 .update(cx, |lsp_store, cx| {
3177 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3178 &buffer,
3179 [
3180 lsp::TextEdit {
3181 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3182 new_text: "Path".into(),
3183 },
3184 lsp::TextEdit {
3185 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3186 new_text: "from path import Path\n\n\n".into(),
3187 },
3188 ],
3189 LanguageServerId(0),
3190 None,
3191 cx,
3192 )
3193 })
3194 .await
3195 .unwrap();
3196
3197 buffer.update(cx, |buffer, cx| {
3198 buffer.edit(edits, None, cx);
3199 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3200 });
3201}
3202
// Servers sometimes send malformed edits: unsorted, with inverted ranges, or
// pointing past the end of the document. They should be normalized (sorted,
// range-flipped, clipped) rather than rejected.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (99, 0) is beyond the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization the edits minimize to the same two edits as in
        // the well-formed case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3309
3310fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3311 buffer: &Buffer,
3312 range: Range<T>,
3313) -> Vec<(String, Option<DiagnosticSeverity>)> {
3314 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3315 for chunk in buffer.snapshot().chunks(range, true) {
3316 if chunks
3317 .last()
3318 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3319 {
3320 chunks.last_mut().unwrap().0.push_str(chunk.text);
3321 } else {
3322 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3323 }
3324 }
3325 chunks
3326}
3327
// Go-to-definition into a file outside the project should open that file in a
// temporary, invisible worktree that is released once the definition is
// dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The server resolves the definition to a location in the out-of-project
    // file a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was opened in a new, invisible worktree alongside the visible
        // b.rs worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists every worktree in the project as (absolute path, is_visible).
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3426
3427#[gpui::test]
3428async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
3429 init_test(cx);
3430
3431 let fs = FakeFs::new(cx.executor());
3432 fs.insert_tree(
3433 path!("/dir"),
3434 json!({
3435 "a.ts": "",
3436 }),
3437 )
3438 .await;
3439
3440 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3441
3442 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3443 language_registry.add(typescript_lang());
3444 let mut fake_language_servers = language_registry.register_fake_lsp(
3445 "TypeScript",
3446 FakeLspAdapter {
3447 capabilities: lsp::ServerCapabilities {
3448 completion_provider: Some(lsp::CompletionOptions {
3449 trigger_characters: Some(vec![".".to_string()]),
3450 ..Default::default()
3451 }),
3452 ..Default::default()
3453 },
3454 ..Default::default()
3455 },
3456 );
3457
3458 let (buffer, _handle) = project
3459 .update(cx, |p, cx| {
3460 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3461 })
3462 .await
3463 .unwrap();
3464
3465 let fake_server = fake_language_servers.next().await.unwrap();
3466
3467 // When text_edit exists, it takes precedence over insert_text and label
3468 let text = "let a = obj.fqn";
3469 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3470 let completions = project.update(cx, |project, cx| {
3471 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3472 });
3473
3474 fake_server
3475 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
3476 Ok(Some(lsp::CompletionResponse::Array(vec![
3477 lsp::CompletionItem {
3478 label: "labelText".into(),
3479 insert_text: Some("insertText".into()),
3480 text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
3481 range: lsp::Range::new(
3482 lsp::Position::new(0, text.len() as u32 - 3),
3483 lsp::Position::new(0, text.len() as u32),
3484 ),
3485 new_text: "textEditText".into(),
3486 })),
3487 ..Default::default()
3488 },
3489 ])))
3490 })
3491 .next()
3492 .await;
3493
3494 let completions = completions
3495 .await
3496 .unwrap()
3497 .into_iter()
3498 .flat_map(|response| response.completions)
3499 .collect::<Vec<_>>();
3500 let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
3501
3502 assert_eq!(completions.len(), 1);
3503 assert_eq!(completions[0].new_text, "textEditText");
3504 assert_eq!(
3505 completions[0].replace_range.to_offset(&snapshot),
3506 text.len() - 3..text.len()
3507 );
3508}
3509
// When items carry no `text_edit` of their own but the server supplies a
// default `edit_range` via `CompletionList.itemDefaults` (LSP 3.17), the
// replacement range must come from that default, and the inserted text must
// fall back to `text_edit_text` first, then to the item's `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    // -> `text_edit_text` is inserted over the default range.
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // The request is started before the handler is installed; the
        // handler's `.next().await` below resolves once it has replied.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn" (3 chars).
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    // -> the label is inserted; `insert_text` is ignored when a default
    // edit range is in effect.
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3646
// When the server provides neither a per-item `text_edit` nor a default
// `edit_range`, the replacement range must be inferred from the word
// surrounding the cursor, and the inserted text falls back to
// `insert_text` first, then the item's `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    // -> `insert_text` replaces the word under the cursor ("fqn", 3 chars).
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the request first; the handler installed below serves it, and
    // its `.next().await` resolves after the reply is sent.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    // -> the label is inserted over the word before the cursor. The cursor
    // sits just before the closing quote, so the word is "cmp" (3 chars).
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3752
3753#[gpui::test]
3754async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3755 init_test(cx);
3756
3757 let fs = FakeFs::new(cx.executor());
3758 fs.insert_tree(
3759 path!("/dir"),
3760 json!({
3761 "a.ts": "",
3762 }),
3763 )
3764 .await;
3765
3766 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3767
3768 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3769 language_registry.add(typescript_lang());
3770 let mut fake_language_servers = language_registry.register_fake_lsp(
3771 "TypeScript",
3772 FakeLspAdapter {
3773 capabilities: lsp::ServerCapabilities {
3774 completion_provider: Some(lsp::CompletionOptions {
3775 trigger_characters: Some(vec![":".to_string()]),
3776 ..Default::default()
3777 }),
3778 ..Default::default()
3779 },
3780 ..Default::default()
3781 },
3782 );
3783
3784 let (buffer, _handle) = project
3785 .update(cx, |p, cx| {
3786 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3787 })
3788 .await
3789 .unwrap();
3790
3791 let fake_server = fake_language_servers.next().await.unwrap();
3792
3793 let text = "let a = b.fqn";
3794 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3795 let completions = project.update(cx, |project, cx| {
3796 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3797 });
3798
3799 fake_server
3800 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3801 Ok(Some(lsp::CompletionResponse::Array(vec![
3802 lsp::CompletionItem {
3803 label: "fullyQualifiedName?".into(),
3804 insert_text: Some("fully\rQualified\r\nName".into()),
3805 ..Default::default()
3806 },
3807 ])))
3808 })
3809 .next()
3810 .await;
3811 let completions = completions
3812 .await
3813 .unwrap()
3814 .into_iter()
3815 .flat_map(|response| response.completions)
3816 .collect::<Vec<_>>();
3817 assert_eq!(completions.len(), 1);
3818 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3819}
3820
// End-to-end flow for a code action that carries a command rather than
// edits: the action is resolved, its command is executed, and the
// workspace edit the server sends back *during* command execution must be
// captured into the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // The fake server advertises both lazy code-action resolution and a
    // single executable command.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Pick the first action ("The code action"), the one carrying resolve data.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // The server-initiated edit inserts "X" at the start of
                    // the file; the editor must acknowledge it.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a single unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3962
// Renaming an entry to a path whose parent directories don't exist yet must
// create the whole hierarchy and preserve the file's contents; a second
// rename into an already-existing directory must also succeed.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a directory hierarchy that doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // The entry id changed with the move; look it up again for the next rename.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
4070
4071#[gpui::test(iterations = 10)]
4072async fn test_save_file(cx: &mut gpui::TestAppContext) {
4073 init_test(cx);
4074
4075 let fs = FakeFs::new(cx.executor());
4076 fs.insert_tree(
4077 path!("/dir"),
4078 json!({
4079 "file1": "the old contents",
4080 }),
4081 )
4082 .await;
4083
4084 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4085 let buffer = project
4086 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4087 .await
4088 .unwrap();
4089 buffer.update(cx, |buffer, cx| {
4090 assert_eq!(buffer.text(), "the old contents");
4091 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4092 });
4093
4094 project
4095 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4096 .await
4097 .unwrap();
4098
4099 let new_text = fs
4100 .load(Path::new(path!("/dir/file1")))
4101 .await
4102 .unwrap()
4103 .replace("\r\n", "\n");
4104 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4105}
4106
// Regression test for issue #24349: saving an untitled buffer under a
// filename that matches a registered language must spawn that language's
// server and register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled (file-less) buffer. With no file there is no
    // language, so no server should be associated with it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` name gives the buffer a file and a language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now served by the newly-started language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4186
4187#[gpui::test(iterations = 30)]
4188async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4189 init_test(cx);
4190
4191 let fs = FakeFs::new(cx.executor());
4192 fs.insert_tree(
4193 path!("/dir"),
4194 json!({
4195 "file1": "the original contents",
4196 }),
4197 )
4198 .await;
4199
4200 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4201 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4202 let buffer = project
4203 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4204 .await
4205 .unwrap();
4206
4207 // Simulate buffer diffs being slow, so that they don't complete before
4208 // the next file change occurs.
4209 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4210
4211 // Change the buffer's file on disk, and then wait for the file change
4212 // to be detected by the worktree, so that the buffer starts reloading.
4213 fs.save(
4214 path!("/dir/file1").as_ref(),
4215 &"the first contents".into(),
4216 Default::default(),
4217 )
4218 .await
4219 .unwrap();
4220 worktree.next_event(cx).await;
4221
4222 // Change the buffer's file again. Depending on the random seed, the
4223 // previous file change may still be in progress.
4224 fs.save(
4225 path!("/dir/file1").as_ref(),
4226 &"the second contents".into(),
4227 Default::default(),
4228 )
4229 .await
4230 .unwrap();
4231 worktree.next_event(cx).await;
4232
4233 cx.executor().run_until_parked();
4234 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4235 buffer.read_with(cx, |buffer, _| {
4236 assert_eq!(buffer.text(), on_disk_text);
4237 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4238 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4239 });
4240}
4241
4242#[gpui::test(iterations = 30)]
4243async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4244 init_test(cx);
4245
4246 let fs = FakeFs::new(cx.executor());
4247 fs.insert_tree(
4248 path!("/dir"),
4249 json!({
4250 "file1": "the original contents",
4251 }),
4252 )
4253 .await;
4254
4255 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4256 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4257 let buffer = project
4258 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4259 .await
4260 .unwrap();
4261
4262 // Simulate buffer diffs being slow, so that they don't complete before
4263 // the next file change occurs.
4264 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4265
4266 // Change the buffer's file on disk, and then wait for the file change
4267 // to be detected by the worktree, so that the buffer starts reloading.
4268 fs.save(
4269 path!("/dir/file1").as_ref(),
4270 &"the first contents".into(),
4271 Default::default(),
4272 )
4273 .await
4274 .unwrap();
4275 worktree.next_event(cx).await;
4276
4277 cx.executor()
4278 .spawn(cx.executor().simulate_random_delay())
4279 .await;
4280
4281 // Perform a noop edit, causing the buffer's version to increase.
4282 buffer.update(cx, |buffer, cx| {
4283 buffer.edit([(0..0, " ")], None, cx);
4284 buffer.undo(cx);
4285 });
4286
4287 cx.executor().run_until_parked();
4288 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4289 buffer.read_with(cx, |buffer, _| {
4290 let buffer_text = buffer.text();
4291 if buffer_text == on_disk_text {
4292 assert!(
4293 !buffer.is_dirty() && !buffer.has_conflict(),
4294 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4295 );
4296 }
4297 // If the file change occurred while the buffer was processing the first
4298 // change, the buffer will be in a conflicting state.
4299 else {
4300 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4301 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4302 }
4303 });
4304}
4305
4306#[gpui::test]
4307async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4308 init_test(cx);
4309
4310 let fs = FakeFs::new(cx.executor());
4311 fs.insert_tree(
4312 path!("/dir"),
4313 json!({
4314 "file1": "the old contents",
4315 }),
4316 )
4317 .await;
4318
4319 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4320 let buffer = project
4321 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4322 .await
4323 .unwrap();
4324 buffer.update(cx, |buffer, cx| {
4325 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4326 });
4327
4328 project
4329 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4330 .await
4331 .unwrap();
4332
4333 let new_text = fs
4334 .load(Path::new(path!("/dir/file1")))
4335 .await
4336 .unwrap()
4337 .replace("\r\n", "\n");
4338 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4339}
4340
4341#[gpui::test]
4342async fn test_save_as(cx: &mut gpui::TestAppContext) {
4343 init_test(cx);
4344
4345 let fs = FakeFs::new(cx.executor());
4346 fs.insert_tree("/dir", json!({})).await;
4347
4348 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4349
4350 let languages = project.update(cx, |project, _| project.languages().clone());
4351 languages.add(rust_lang());
4352
4353 let buffer = project.update(cx, |project, cx| {
4354 project.create_local_buffer("", None, false, cx)
4355 });
4356 buffer.update(cx, |buffer, cx| {
4357 buffer.edit([(0..0, "abc")], None, cx);
4358 assert!(buffer.is_dirty());
4359 assert!(!buffer.has_conflict());
4360 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4361 });
4362 project
4363 .update(cx, |project, cx| {
4364 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4365 let path = ProjectPath {
4366 worktree_id,
4367 path: rel_path("file1.rs").into(),
4368 };
4369 project.save_buffer_as(buffer.clone(), path, cx)
4370 })
4371 .await
4372 .unwrap();
4373 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4374
4375 cx.executor().run_until_parked();
4376 buffer.update(cx, |buffer, cx| {
4377 assert_eq!(
4378 buffer.file().unwrap().full_path(cx),
4379 Path::new("dir/file1.rs")
4380 );
4381 assert!(!buffer.is_dirty());
4382 assert!(!buffer.has_conflict());
4383 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4384 });
4385
4386 let opened_buffer = project
4387 .update(cx, |project, cx| {
4388 project.open_local_buffer("/dir/file1.rs", cx)
4389 })
4390 .await
4391 .unwrap();
4392 assert_eq!(opened_buffer, buffer);
4393}
4394
// "Save as" from an edited buffer over to a new path must re-associate the
// buffer with the new file while leaving the original file on disk
// untouched and openable as its own buffer.
#[gpui::test]
async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    fs.insert_tree(
        path!("/dir"),
        json!({
            "data_a.txt": "data about a"
        }),
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    // Edit the buffer ("data about a" -> "data about b") before saving-as.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(11..12, "b")], None, cx);
    });

    // Save buffer's contents as a new file and confirm that the buffer's now
    // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
    // file associated with the buffer has now been updated to `data_b.txt`
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let new_path = ProjectPath {
                worktree_id,
                path: rel_path("data_b.txt").into(),
            };

            project.save_buffer_as(buffer.clone(), new_path, cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_b.txt")
        )
    });

    // Open the original `data_a.txt` file, confirming that its contents are
    // unchanged and the resulting buffer's associated file is `data_a.txt`.
    let original_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/data_a.txt"), cx)
        })
        .await
        .unwrap();

    original_buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "data about a");
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/data_a.txt")
        )
    });
}
4461
// Verifies that worktree entry ids remain stable across on-disk renames and
// deletions, and that a remote (replica) worktree converges to the same state
// as the local one after applying the streamed update batch.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // NOTE(review): redundant — this trait is already imported at file level
    // (`use worktree::WorktreeModelHandle as _;`); could be removed.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // This test performs real filesystem I/O (TempTree + RealFs), so the
    // deterministic executor must be allowed to block.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Opens a buffer for a path relative to the temp dir root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolves a worktree-relative path to its entry id, panicking when absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    // Capture entry ids before any renames so stability can be asserted later.
    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Accumulate every update the local worktree emits; they are replayed into
    // the remote replica at the end of the test.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects all of the renames/deletions above.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Renamed entries keep their original ids.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers track their files through the renames...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while a buffer whose file was deleted keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4629
4630#[gpui::test(iterations = 10)]
4631async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4632 init_test(cx);
4633
4634 let fs = FakeFs::new(cx.executor());
4635 fs.insert_tree(
4636 path!("/dir"),
4637 json!({
4638 "a": {
4639 "file1": "",
4640 }
4641 }),
4642 )
4643 .await;
4644
4645 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4646 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4647 let tree_id = tree.update(cx, |tree, _| tree.id());
4648
4649 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4650 project.update(cx, |project, cx| {
4651 let tree = project.worktrees(cx).next().unwrap();
4652 tree.read(cx)
4653 .entry_for_path(rel_path(path))
4654 .unwrap_or_else(|| panic!("no entry for path {}", path))
4655 .id
4656 })
4657 };
4658
4659 let dir_id = id_for_path("a", cx);
4660 let file_id = id_for_path("a/file1", cx);
4661 let buffer = project
4662 .update(cx, |p, cx| {
4663 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4664 })
4665 .await
4666 .unwrap();
4667 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4668
4669 project
4670 .update(cx, |project, cx| {
4671 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4672 })
4673 .unwrap()
4674 .await
4675 .into_included()
4676 .unwrap();
4677 cx.executor().run_until_parked();
4678
4679 assert_eq!(id_for_path("b", cx), dir_id);
4680 assert_eq!(id_for_path("b/file1", cx), file_id);
4681 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4682}
4683
4684#[gpui::test]
4685async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4686 init_test(cx);
4687
4688 let fs = FakeFs::new(cx.executor());
4689 fs.insert_tree(
4690 "/dir",
4691 json!({
4692 "a.txt": "a-contents",
4693 "b.txt": "b-contents",
4694 }),
4695 )
4696 .await;
4697
4698 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4699
4700 // Spawn multiple tasks to open paths, repeating some paths.
4701 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4702 (
4703 p.open_local_buffer("/dir/a.txt", cx),
4704 p.open_local_buffer("/dir/b.txt", cx),
4705 p.open_local_buffer("/dir/a.txt", cx),
4706 )
4707 });
4708
4709 let buffer_a_1 = buffer_a_1.await.unwrap();
4710 let buffer_a_2 = buffer_a_2.await.unwrap();
4711 let buffer_b = buffer_b.await.unwrap();
4712 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4713 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4714
4715 // There is only one buffer per path.
4716 let buffer_a_id = buffer_a_1.entity_id();
4717 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4718
4719 // Open the same path again while it is still open.
4720 drop(buffer_a_1);
4721 let buffer_a_3 = project
4722 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4723 .await
4724 .unwrap();
4725
4726 // There's still only one buffer per path.
4727 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4728}
4729
4730#[gpui::test]
4731async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4732 init_test(cx);
4733
4734 let fs = FakeFs::new(cx.executor());
4735 fs.insert_tree(
4736 path!("/dir"),
4737 json!({
4738 "file1": "abc",
4739 "file2": "def",
4740 "file3": "ghi",
4741 }),
4742 )
4743 .await;
4744
4745 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4746
4747 let buffer1 = project
4748 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4749 .await
4750 .unwrap();
4751 let events = Arc::new(Mutex::new(Vec::new()));
4752
4753 // initially, the buffer isn't dirty.
4754 buffer1.update(cx, |buffer, cx| {
4755 cx.subscribe(&buffer1, {
4756 let events = events.clone();
4757 move |_, _, event, _| match event {
4758 BufferEvent::Operation { .. } => {}
4759 _ => events.lock().push(event.clone()),
4760 }
4761 })
4762 .detach();
4763
4764 assert!(!buffer.is_dirty());
4765 assert!(events.lock().is_empty());
4766
4767 buffer.edit([(1..2, "")], None, cx);
4768 });
4769
4770 // after the first edit, the buffer is dirty, and emits a dirtied event.
4771 buffer1.update(cx, |buffer, cx| {
4772 assert!(buffer.text() == "ac");
4773 assert!(buffer.is_dirty());
4774 assert_eq!(
4775 *events.lock(),
4776 &[
4777 language::BufferEvent::Edited,
4778 language::BufferEvent::DirtyChanged
4779 ]
4780 );
4781 events.lock().clear();
4782 buffer.did_save(
4783 buffer.version(),
4784 buffer.file().unwrap().disk_state().mtime(),
4785 cx,
4786 );
4787 });
4788
4789 // after saving, the buffer is not dirty, and emits a saved event.
4790 buffer1.update(cx, |buffer, cx| {
4791 assert!(!buffer.is_dirty());
4792 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4793 events.lock().clear();
4794
4795 buffer.edit([(1..1, "B")], None, cx);
4796 buffer.edit([(2..2, "D")], None, cx);
4797 });
4798
4799 // after editing again, the buffer is dirty, and emits another dirty event.
4800 buffer1.update(cx, |buffer, cx| {
4801 assert!(buffer.text() == "aBDc");
4802 assert!(buffer.is_dirty());
4803 assert_eq!(
4804 *events.lock(),
4805 &[
4806 language::BufferEvent::Edited,
4807 language::BufferEvent::DirtyChanged,
4808 language::BufferEvent::Edited,
4809 ],
4810 );
4811 events.lock().clear();
4812
4813 // After restoring the buffer to its previously-saved state,
4814 // the buffer is not considered dirty anymore.
4815 buffer.edit([(1..3, "")], None, cx);
4816 assert!(buffer.text() == "ac");
4817 assert!(!buffer.is_dirty());
4818 });
4819
4820 assert_eq!(
4821 *events.lock(),
4822 &[
4823 language::BufferEvent::Edited,
4824 language::BufferEvent::DirtyChanged
4825 ]
4826 );
4827
4828 // When a file is deleted, it is not considered dirty.
4829 let events = Arc::new(Mutex::new(Vec::new()));
4830 let buffer2 = project
4831 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4832 .await
4833 .unwrap();
4834 buffer2.update(cx, |_, cx| {
4835 cx.subscribe(&buffer2, {
4836 let events = events.clone();
4837 move |_, _, event, _| match event {
4838 BufferEvent::Operation { .. } => {}
4839 _ => events.lock().push(event.clone()),
4840 }
4841 })
4842 .detach();
4843 });
4844
4845 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4846 .await
4847 .unwrap();
4848 cx.executor().run_until_parked();
4849 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4850 assert_eq!(
4851 mem::take(&mut *events.lock()),
4852 &[language::BufferEvent::FileHandleChanged]
4853 );
4854
4855 // Buffer becomes dirty when edited.
4856 buffer2.update(cx, |buffer, cx| {
4857 buffer.edit([(2..3, "")], None, cx);
4858 assert_eq!(buffer.is_dirty(), true);
4859 });
4860 assert_eq!(
4861 mem::take(&mut *events.lock()),
4862 &[
4863 language::BufferEvent::Edited,
4864 language::BufferEvent::DirtyChanged
4865 ]
4866 );
4867
4868 // Buffer becomes clean again when all of its content is removed, because
4869 // the file was deleted.
4870 buffer2.update(cx, |buffer, cx| {
4871 buffer.edit([(0..2, "")], None, cx);
4872 assert_eq!(buffer.is_empty(), true);
4873 assert_eq!(buffer.is_dirty(), false);
4874 });
4875 assert_eq!(
4876 *events.lock(),
4877 &[
4878 language::BufferEvent::Edited,
4879 language::BufferEvent::DirtyChanged
4880 ]
4881 );
4882
4883 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4884 let events = Arc::new(Mutex::new(Vec::new()));
4885 let buffer3 = project
4886 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4887 .await
4888 .unwrap();
4889 buffer3.update(cx, |_, cx| {
4890 cx.subscribe(&buffer3, {
4891 let events = events.clone();
4892 move |_, _, event, _| match event {
4893 BufferEvent::Operation { .. } => {}
4894 _ => events.lock().push(event.clone()),
4895 }
4896 })
4897 .detach();
4898 });
4899
4900 buffer3.update(cx, |buffer, cx| {
4901 buffer.edit([(0..0, "x")], None, cx);
4902 });
4903 events.lock().clear();
4904 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4905 .await
4906 .unwrap();
4907 cx.executor().run_until_parked();
4908 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4909 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4910}
4911
// Verifies how a buffer reacts to its backing file changing on disk: a clean
// buffer silently reloads (with anchors remapped through the diff), while a
// dirty buffer keeps its edits and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers record offsets whose anchors we track across the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor lands at the corresponding marked offset in the new
        // text, proving the reload was applied as a minimal diff.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4994
4995#[gpui::test]
4996async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4997 init_test(cx);
4998
4999 let fs = FakeFs::new(cx.executor());
5000 fs.insert_tree(
5001 path!("/dir"),
5002 json!({
5003 "file1": "a\nb\nc\n",
5004 "file2": "one\r\ntwo\r\nthree\r\n",
5005 }),
5006 )
5007 .await;
5008
5009 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5010 let buffer1 = project
5011 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
5012 .await
5013 .unwrap();
5014 let buffer2 = project
5015 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
5016 .await
5017 .unwrap();
5018
5019 buffer1.update(cx, |buffer, _| {
5020 assert_eq!(buffer.text(), "a\nb\nc\n");
5021 assert_eq!(buffer.line_ending(), LineEnding::Unix);
5022 });
5023 buffer2.update(cx, |buffer, _| {
5024 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
5025 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5026 });
5027
5028 // Change a file's line endings on disk from unix to windows. The buffer's
5029 // state updates correctly.
5030 fs.save(
5031 path!("/dir/file1").as_ref(),
5032 &"aaa\nb\nc\n".into(),
5033 LineEnding::Windows,
5034 )
5035 .await
5036 .unwrap();
5037 cx.executor().run_until_parked();
5038 buffer1.update(cx, |buffer, _| {
5039 assert_eq!(buffer.text(), "aaa\nb\nc\n");
5040 assert_eq!(buffer.line_ending(), LineEnding::Windows);
5041 });
5042
5043 // Save a file with windows line endings. The file is written correctly.
5044 buffer2.update(cx, |buffer, cx| {
5045 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
5046 });
5047 project
5048 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
5049 .await
5050 .unwrap();
5051 assert_eq!(
5052 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
5053 "one\r\ntwo\r\nthree\r\nfour\r\n",
5054 );
5055}
5056
// Verifies diagnostic grouping: LSP related-information hints are folded into
// the same group as their primary diagnostic, and `diagnostic_group` returns
// each group's entries (hints plus primary) by group id.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Publish two primary diagnostics ("error 1", "error 2") plus separate
    // HINT-severity diagnostics that mirror their related information — the
    // shape rust-analyzer uses for related locations.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // Hint diagnostic pointing back at "error 1" as its original.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second primary diagnostic with two related hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All five entries come back ordered by position; the "error 2" family
    // shares group 0 and the "error 1" family shares group 1, with exactly
    // one `is_primary` entry per group.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" family: both hints followed by the primary.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" family: primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5316
// Verifies that renaming a worktree entry drives the LSP file-operation
// protocol: `workspace/willRenameFiles` is sent first (and its returned
// workspace edit honored), then `workspace/didRenameFiles` after the rename.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in `.rs` file renames and any folder renames, so the
    // server advertises will/did-rename capabilities for them.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the request handler below is installed while this
    // future is pending.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // Workspace edit the fake server returns from willRenameFiles; the client
    // is expected to apply it to `two/two.rs`.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles with the
    // same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    // The willRenameFiles handler actually ran and produced the edit.
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5452
// Verifies symbol rename via LSP: `textDocument/prepareRename` resolves the
// renameable range, and `textDocument/rename` returns a multi-file workspace
// edit that the project applies to both affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise prepareRename support so the client sends it first.
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside the identifier `ONE`).
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range (line 0, chars 6..9) maps to buffer offsets 6..9.
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server answers with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers, each with the edits
    // applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5592
// Verifies project-wide text search: matches are reported per file with byte
// ranges, and unsaved in-memory buffer edits are searched rather than the
// stale on-disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive whole-word search for "TWO" hits the definition in
    // two.rs and the reference in three.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now contains two
    // occurrences of "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query now also matches the dirty buffer's new contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5669
5670#[gpui::test]
5671async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5672 init_test(cx);
5673
5674 let search_query = "file";
5675
5676 let fs = FakeFs::new(cx.executor());
5677 fs.insert_tree(
5678 path!("/dir"),
5679 json!({
5680 "one.rs": r#"// Rust file one"#,
5681 "one.ts": r#"// TypeScript file one"#,
5682 "two.rs": r#"// Rust file two"#,
5683 "two.ts": r#"// TypeScript file two"#,
5684 }),
5685 )
5686 .await;
5687 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5688
5689 assert!(
5690 search(
5691 &project,
5692 SearchQuery::text(
5693 search_query,
5694 false,
5695 true,
5696 false,
5697 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5698 Default::default(),
5699 false,
5700 None
5701 )
5702 .unwrap(),
5703 cx
5704 )
5705 .await
5706 .unwrap()
5707 .is_empty(),
5708 "If no inclusions match, no files should be returned"
5709 );
5710
5711 assert_eq!(
5712 search(
5713 &project,
5714 SearchQuery::text(
5715 search_query,
5716 false,
5717 true,
5718 false,
5719 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5720 Default::default(),
5721 false,
5722 None
5723 )
5724 .unwrap(),
5725 cx
5726 )
5727 .await
5728 .unwrap(),
5729 HashMap::from_iter([
5730 (path!("dir/one.rs").to_string(), vec![8..12]),
5731 (path!("dir/two.rs").to_string(), vec![8..12]),
5732 ]),
5733 "Rust only search should give only Rust files"
5734 );
5735
5736 assert_eq!(
5737 search(
5738 &project,
5739 SearchQuery::text(
5740 search_query,
5741 false,
5742 true,
5743 false,
5744 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5745 .unwrap(),
5746 Default::default(),
5747 false,
5748 None,
5749 )
5750 .unwrap(),
5751 cx
5752 )
5753 .await
5754 .unwrap(),
5755 HashMap::from_iter([
5756 (path!("dir/one.ts").to_string(), vec![14..18]),
5757 (path!("dir/two.ts").to_string(), vec![14..18]),
5758 ]),
5759 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5760 );
5761
5762 assert_eq!(
5763 search(
5764 &project,
5765 SearchQuery::text(
5766 search_query,
5767 false,
5768 true,
5769 false,
5770 PathMatcher::new(
5771 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5772 PathStyle::local()
5773 )
5774 .unwrap(),
5775 Default::default(),
5776 false,
5777 None,
5778 )
5779 .unwrap(),
5780 cx
5781 )
5782 .await
5783 .unwrap(),
5784 HashMap::from_iter([
5785 (path!("dir/two.ts").to_string(), vec![14..18]),
5786 (path!("dir/one.rs").to_string(), vec![8..12]),
5787 (path!("dir/one.ts").to_string(), vec![14..18]),
5788 (path!("dir/two.rs").to_string(), vec![8..12]),
5789 ]),
5790 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5791 );
5792}
5793
5794#[gpui::test]
5795async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5796 init_test(cx);
5797
5798 let search_query = "file";
5799
5800 let fs = FakeFs::new(cx.executor());
5801 fs.insert_tree(
5802 path!("/dir"),
5803 json!({
5804 "one.rs": r#"// Rust file one"#,
5805 "one.ts": r#"// TypeScript file one"#,
5806 "two.rs": r#"// Rust file two"#,
5807 "two.ts": r#"// TypeScript file two"#,
5808 }),
5809 )
5810 .await;
5811 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5812
5813 assert_eq!(
5814 search(
5815 &project,
5816 SearchQuery::text(
5817 search_query,
5818 false,
5819 true,
5820 false,
5821 Default::default(),
5822 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5823 false,
5824 None,
5825 )
5826 .unwrap(),
5827 cx
5828 )
5829 .await
5830 .unwrap(),
5831 HashMap::from_iter([
5832 (path!("dir/one.rs").to_string(), vec![8..12]),
5833 (path!("dir/one.ts").to_string(), vec![14..18]),
5834 (path!("dir/two.rs").to_string(), vec![8..12]),
5835 (path!("dir/two.ts").to_string(), vec![14..18]),
5836 ]),
5837 "If no exclusions match, all files should be returned"
5838 );
5839
5840 assert_eq!(
5841 search(
5842 &project,
5843 SearchQuery::text(
5844 search_query,
5845 false,
5846 true,
5847 false,
5848 Default::default(),
5849 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5850 false,
5851 None,
5852 )
5853 .unwrap(),
5854 cx
5855 )
5856 .await
5857 .unwrap(),
5858 HashMap::from_iter([
5859 (path!("dir/one.ts").to_string(), vec![14..18]),
5860 (path!("dir/two.ts").to_string(), vec![14..18]),
5861 ]),
5862 "Rust exclusion search should give only TypeScript files"
5863 );
5864
5865 assert_eq!(
5866 search(
5867 &project,
5868 SearchQuery::text(
5869 search_query,
5870 false,
5871 true,
5872 false,
5873 Default::default(),
5874 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5875 .unwrap(),
5876 false,
5877 None,
5878 )
5879 .unwrap(),
5880 cx
5881 )
5882 .await
5883 .unwrap(),
5884 HashMap::from_iter([
5885 (path!("dir/one.rs").to_string(), vec![8..12]),
5886 (path!("dir/two.rs").to_string(), vec![8..12]),
5887 ]),
5888 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5889 );
5890
5891 assert!(
5892 search(
5893 &project,
5894 SearchQuery::text(
5895 search_query,
5896 false,
5897 true,
5898 false,
5899 Default::default(),
5900 PathMatcher::new(
5901 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5902 PathStyle::local(),
5903 )
5904 .unwrap(),
5905 false,
5906 None,
5907 )
5908 .unwrap(),
5909 cx
5910 )
5911 .await
5912 .unwrap()
5913 .is_empty(),
5914 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5915 );
5916}
5917
5918#[gpui::test]
5919async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5920 init_test(cx);
5921
5922 let search_query = "file";
5923
5924 let fs = FakeFs::new(cx.executor());
5925 fs.insert_tree(
5926 path!("/dir"),
5927 json!({
5928 "one.rs": r#"// Rust file one"#,
5929 "one.ts": r#"// TypeScript file one"#,
5930 "two.rs": r#"// Rust file two"#,
5931 "two.ts": r#"// TypeScript file two"#,
5932 }),
5933 )
5934 .await;
5935
5936 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5937 let path_style = PathStyle::local();
5938 let _buffer = project.update(cx, |project, cx| {
5939 project.create_local_buffer("file", None, false, cx)
5940 });
5941
5942 assert_eq!(
5943 search(
5944 &project,
5945 SearchQuery::text(
5946 search_query,
5947 false,
5948 true,
5949 false,
5950 Default::default(),
5951 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5952 false,
5953 None,
5954 )
5955 .unwrap(),
5956 cx
5957 )
5958 .await
5959 .unwrap(),
5960 HashMap::from_iter([
5961 (path!("dir/one.rs").to_string(), vec![8..12]),
5962 (path!("dir/one.ts").to_string(), vec![14..18]),
5963 (path!("dir/two.rs").to_string(), vec![8..12]),
5964 (path!("dir/two.ts").to_string(), vec![14..18]),
5965 ]),
5966 "If no exclusions match, all files should be returned"
5967 );
5968
5969 assert_eq!(
5970 search(
5971 &project,
5972 SearchQuery::text(
5973 search_query,
5974 false,
5975 true,
5976 false,
5977 Default::default(),
5978 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5979 false,
5980 None,
5981 )
5982 .unwrap(),
5983 cx
5984 )
5985 .await
5986 .unwrap(),
5987 HashMap::from_iter([
5988 (path!("dir/one.ts").to_string(), vec![14..18]),
5989 (path!("dir/two.ts").to_string(), vec![14..18]),
5990 ]),
5991 "Rust exclusion search should give only TypeScript files"
5992 );
5993
5994 assert_eq!(
5995 search(
5996 &project,
5997 SearchQuery::text(
5998 search_query,
5999 false,
6000 true,
6001 false,
6002 Default::default(),
6003 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
6004 false,
6005 None,
6006 )
6007 .unwrap(),
6008 cx
6009 )
6010 .await
6011 .unwrap(),
6012 HashMap::from_iter([
6013 (path!("dir/one.rs").to_string(), vec![8..12]),
6014 (path!("dir/two.rs").to_string(), vec![8..12]),
6015 ]),
6016 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
6017 );
6018
6019 assert!(
6020 search(
6021 &project,
6022 SearchQuery::text(
6023 search_query,
6024 false,
6025 true,
6026 false,
6027 Default::default(),
6028 PathMatcher::new(
6029 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
6030 PathStyle::local(),
6031 )
6032 .unwrap(),
6033 false,
6034 None,
6035 )
6036 .unwrap(),
6037 cx
6038 )
6039 .await
6040 .unwrap()
6041 .is_empty(),
6042 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
6043 );
6044}
6045
6046#[gpui::test]
6047async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
6048 init_test(cx);
6049
6050 let search_query = "file";
6051
6052 let fs = FakeFs::new(cx.executor());
6053 fs.insert_tree(
6054 path!("/dir"),
6055 json!({
6056 "one.rs": r#"// Rust file one"#,
6057 "one.ts": r#"// TypeScript file one"#,
6058 "two.rs": r#"// Rust file two"#,
6059 "two.ts": r#"// TypeScript file two"#,
6060 }),
6061 )
6062 .await;
6063 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6064 assert!(
6065 search(
6066 &project,
6067 SearchQuery::text(
6068 search_query,
6069 false,
6070 true,
6071 false,
6072 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6073 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6074 false,
6075 None,
6076 )
6077 .unwrap(),
6078 cx
6079 )
6080 .await
6081 .unwrap()
6082 .is_empty(),
6083 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6084 );
6085
6086 assert!(
6087 search(
6088 &project,
6089 SearchQuery::text(
6090 search_query,
6091 false,
6092 true,
6093 false,
6094 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6095 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6096 false,
6097 None,
6098 )
6099 .unwrap(),
6100 cx
6101 )
6102 .await
6103 .unwrap()
6104 .is_empty(),
6105 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6106 );
6107
6108 assert!(
6109 search(
6110 &project,
6111 SearchQuery::text(
6112 search_query,
6113 false,
6114 true,
6115 false,
6116 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6117 .unwrap(),
6118 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6119 .unwrap(),
6120 false,
6121 None,
6122 )
6123 .unwrap(),
6124 cx
6125 )
6126 .await
6127 .unwrap()
6128 .is_empty(),
6129 "Non-matching inclusions and exclusions should not change that."
6130 );
6131
6132 assert_eq!(
6133 search(
6134 &project,
6135 SearchQuery::text(
6136 search_query,
6137 false,
6138 true,
6139 false,
6140 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6141 .unwrap(),
6142 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6143 .unwrap(),
6144 false,
6145 None,
6146 )
6147 .unwrap(),
6148 cx
6149 )
6150 .await
6151 .unwrap(),
6152 HashMap::from_iter([
6153 (path!("dir/one.ts").to_string(), vec![14..18]),
6154 (path!("dir/two.ts").to_string(), vec![14..18]),
6155 ]),
6156 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6157 );
6158}
6159
6160#[gpui::test]
6161async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6162 init_test(cx);
6163
6164 let fs = FakeFs::new(cx.executor());
6165 fs.insert_tree(
6166 path!("/worktree-a"),
6167 json!({
6168 "haystack.rs": r#"// NEEDLE"#,
6169 "haystack.ts": r#"// NEEDLE"#,
6170 }),
6171 )
6172 .await;
6173 fs.insert_tree(
6174 path!("/worktree-b"),
6175 json!({
6176 "haystack.rs": r#"// NEEDLE"#,
6177 "haystack.ts": r#"// NEEDLE"#,
6178 }),
6179 )
6180 .await;
6181
6182 let path_style = PathStyle::local();
6183 let project = Project::test(
6184 fs.clone(),
6185 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6186 cx,
6187 )
6188 .await;
6189
6190 assert_eq!(
6191 search(
6192 &project,
6193 SearchQuery::text(
6194 "NEEDLE",
6195 false,
6196 true,
6197 false,
6198 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6199 Default::default(),
6200 true,
6201 None,
6202 )
6203 .unwrap(),
6204 cx
6205 )
6206 .await
6207 .unwrap(),
6208 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6209 "should only return results from included worktree"
6210 );
6211 assert_eq!(
6212 search(
6213 &project,
6214 SearchQuery::text(
6215 "NEEDLE",
6216 false,
6217 true,
6218 false,
6219 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6220 Default::default(),
6221 true,
6222 None,
6223 )
6224 .unwrap(),
6225 cx
6226 )
6227 .await
6228 .unwrap(),
6229 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6230 "should only return results from included worktree"
6231 );
6232
6233 assert_eq!(
6234 search(
6235 &project,
6236 SearchQuery::text(
6237 "NEEDLE",
6238 false,
6239 true,
6240 false,
6241 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6242 Default::default(),
6243 false,
6244 None,
6245 )
6246 .unwrap(),
6247 cx
6248 )
6249 .await
6250 .unwrap(),
6251 HashMap::from_iter([
6252 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6253 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6254 ]),
6255 "should return results from both worktrees"
6256 );
6257}
6258
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Worktree with a git repo whose .gitignore hides `target/` and
    // `/node_modules`. Every file contains a "*_key" substring, so the query
    // below can tell ignored matches apart from non-ignored ones.
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // With the fourth flag false (contrast the later queries, where it is
    // true and ignored files do appear), matches inside gitignored
    // directories are not reported.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is created before each remaining query — presumably so
    // worktree state accumulated while scanning ignored directories in one
    // search cannot affect the next; TODO confirm against the scanner.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with the ignored-files flag enabled: every match is
    // returned, including those under target/ and node_modules/.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusions may point inside an ignored directory, and exclusions are
    // applied on top of them: only prettier's package.json survives both.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6383
6384#[gpui::test]
6385async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6386 init_test(cx);
6387
6388 let fs = FakeFs::new(cx.executor());
6389 fs.insert_tree(
6390 path!("/dir"),
6391 json!({
6392 "one.rs": "// ПРИВЕТ? привет!",
6393 "two.rs": "// ПРИВЕТ.",
6394 "three.rs": "// привет",
6395 }),
6396 )
6397 .await;
6398 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6399 let unicode_case_sensitive_query = SearchQuery::text(
6400 "привет",
6401 false,
6402 true,
6403 false,
6404 Default::default(),
6405 Default::default(),
6406 false,
6407 None,
6408 );
6409 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6410 assert_eq!(
6411 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6412 .await
6413 .unwrap(),
6414 HashMap::from_iter([
6415 (path!("dir/one.rs").to_string(), vec![17..29]),
6416 (path!("dir/three.rs").to_string(), vec![3..15]),
6417 ])
6418 );
6419
6420 let unicode_case_insensitive_query = SearchQuery::text(
6421 "привет",
6422 false,
6423 false,
6424 false,
6425 Default::default(),
6426 Default::default(),
6427 false,
6428 None,
6429 );
6430 assert_matches!(
6431 unicode_case_insensitive_query,
6432 Ok(SearchQuery::Regex { .. })
6433 );
6434 assert_eq!(
6435 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6436 .await
6437 .unwrap(),
6438 HashMap::from_iter([
6439 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6440 (path!("dir/two.rs").to_string(), vec![3..15]),
6441 (path!("dir/three.rs").to_string(), vec![3..15]),
6442 ])
6443 );
6444
6445 assert_eq!(
6446 search(
6447 &project,
6448 SearchQuery::text(
6449 "привет.",
6450 false,
6451 false,
6452 false,
6453 Default::default(),
6454 Default::default(),
6455 false,
6456 None,
6457 )
6458 .unwrap(),
6459 cx
6460 )
6461 .await
6462 .unwrap(),
6463 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6464 );
6465}
6466
6467#[gpui::test]
6468async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6469 init_test(cx);
6470
6471 let fs = FakeFs::new(cx.executor());
6472 fs.insert_tree(
6473 "/one/two",
6474 json!({
6475 "three": {
6476 "a.txt": "",
6477 "four": {}
6478 },
6479 "c.rs": ""
6480 }),
6481 )
6482 .await;
6483
6484 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6485 project
6486 .update(cx, |project, cx| {
6487 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6488 project.create_entry((id, rel_path("b..")), true, cx)
6489 })
6490 .await
6491 .unwrap()
6492 .into_included()
6493 .unwrap();
6494
6495 assert_eq!(
6496 fs.paths(true),
6497 vec![
6498 PathBuf::from(path!("/")),
6499 PathBuf::from(path!("/one")),
6500 PathBuf::from(path!("/one/two")),
6501 PathBuf::from(path!("/one/two/c.rs")),
6502 PathBuf::from(path!("/one/two/three")),
6503 PathBuf::from(path!("/one/two/three/a.txt")),
6504 PathBuf::from(path!("/one/two/three/b..")),
6505 PathBuf::from(path!("/one/two/three/four")),
6506 ]
6507 );
6508}
6509
6510#[gpui::test]
6511async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6512 init_test(cx);
6513
6514 let fs = FakeFs::new(cx.executor());
6515 fs.insert_tree(
6516 path!("/dir"),
6517 json!({
6518 "a.tsx": "a",
6519 }),
6520 )
6521 .await;
6522
6523 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6524
6525 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6526 language_registry.add(tsx_lang());
6527 let language_server_names = [
6528 "TypeScriptServer",
6529 "TailwindServer",
6530 "ESLintServer",
6531 "NoHoverCapabilitiesServer",
6532 ];
6533 let mut language_servers = [
6534 language_registry.register_fake_lsp(
6535 "tsx",
6536 FakeLspAdapter {
6537 name: language_server_names[0],
6538 capabilities: lsp::ServerCapabilities {
6539 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6540 ..lsp::ServerCapabilities::default()
6541 },
6542 ..FakeLspAdapter::default()
6543 },
6544 ),
6545 language_registry.register_fake_lsp(
6546 "tsx",
6547 FakeLspAdapter {
6548 name: language_server_names[1],
6549 capabilities: lsp::ServerCapabilities {
6550 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6551 ..lsp::ServerCapabilities::default()
6552 },
6553 ..FakeLspAdapter::default()
6554 },
6555 ),
6556 language_registry.register_fake_lsp(
6557 "tsx",
6558 FakeLspAdapter {
6559 name: language_server_names[2],
6560 capabilities: lsp::ServerCapabilities {
6561 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6562 ..lsp::ServerCapabilities::default()
6563 },
6564 ..FakeLspAdapter::default()
6565 },
6566 ),
6567 language_registry.register_fake_lsp(
6568 "tsx",
6569 FakeLspAdapter {
6570 name: language_server_names[3],
6571 capabilities: lsp::ServerCapabilities {
6572 hover_provider: None,
6573 ..lsp::ServerCapabilities::default()
6574 },
6575 ..FakeLspAdapter::default()
6576 },
6577 ),
6578 ];
6579
6580 let (buffer, _handle) = project
6581 .update(cx, |p, cx| {
6582 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6583 })
6584 .await
6585 .unwrap();
6586 cx.executor().run_until_parked();
6587
6588 let mut servers_with_hover_requests = HashMap::default();
6589 for i in 0..language_server_names.len() {
6590 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6591 panic!(
6592 "Failed to get language server #{i} with name {}",
6593 &language_server_names[i]
6594 )
6595 });
6596 let new_server_name = new_server.server.name();
6597 assert!(
6598 !servers_with_hover_requests.contains_key(&new_server_name),
6599 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6600 );
6601 match new_server_name.as_ref() {
6602 "TailwindServer" | "TypeScriptServer" => {
6603 servers_with_hover_requests.insert(
6604 new_server_name.clone(),
6605 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6606 move |_, _| {
6607 let name = new_server_name.clone();
6608 async move {
6609 Ok(Some(lsp::Hover {
6610 contents: lsp::HoverContents::Scalar(
6611 lsp::MarkedString::String(format!("{name} hover")),
6612 ),
6613 range: None,
6614 }))
6615 }
6616 },
6617 ),
6618 );
6619 }
6620 "ESLintServer" => {
6621 servers_with_hover_requests.insert(
6622 new_server_name,
6623 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6624 |_, _| async move { Ok(None) },
6625 ),
6626 );
6627 }
6628 "NoHoverCapabilitiesServer" => {
6629 let _never_handled = new_server
6630 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6631 panic!(
6632 "Should not call for hovers server with no corresponding capabilities"
6633 )
6634 });
6635 }
6636 unexpected => panic!("Unexpected server name: {unexpected}"),
6637 }
6638 }
6639
6640 let hover_task = project.update(cx, |project, cx| {
6641 project.hover(&buffer, Point::new(0, 0), cx)
6642 });
6643 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6644 |mut hover_request| async move {
6645 hover_request
6646 .next()
6647 .await
6648 .expect("All hover requests should have been triggered")
6649 },
6650 ))
6651 .await;
6652 assert_eq!(
6653 vec!["TailwindServer hover", "TypeScriptServer hover"],
6654 hover_task
6655 .await
6656 .into_iter()
6657 .flatten()
6658 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6659 .sorted()
6660 .collect::<Vec<_>>(),
6661 "Should receive hover responses from all related servers with hover capabilities"
6662 );
6663}
6664
6665#[gpui::test]
6666async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6667 init_test(cx);
6668
6669 let fs = FakeFs::new(cx.executor());
6670 fs.insert_tree(
6671 path!("/dir"),
6672 json!({
6673 "a.ts": "a",
6674 }),
6675 )
6676 .await;
6677
6678 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6679
6680 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6681 language_registry.add(typescript_lang());
6682 let mut fake_language_servers = language_registry.register_fake_lsp(
6683 "TypeScript",
6684 FakeLspAdapter {
6685 capabilities: lsp::ServerCapabilities {
6686 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6687 ..lsp::ServerCapabilities::default()
6688 },
6689 ..FakeLspAdapter::default()
6690 },
6691 );
6692
6693 let (buffer, _handle) = project
6694 .update(cx, |p, cx| {
6695 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6696 })
6697 .await
6698 .unwrap();
6699 cx.executor().run_until_parked();
6700
6701 let fake_server = fake_language_servers
6702 .next()
6703 .await
6704 .expect("failed to get the language server");
6705
6706 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6707 move |_, _| async move {
6708 Ok(Some(lsp::Hover {
6709 contents: lsp::HoverContents::Array(vec![
6710 lsp::MarkedString::String("".to_string()),
6711 lsp::MarkedString::String(" ".to_string()),
6712 lsp::MarkedString::String("\n\n\n".to_string()),
6713 ]),
6714 range: None,
6715 }))
6716 },
6717 );
6718
6719 let hover_task = project.update(cx, |project, cx| {
6720 project.hover(&buffer, Point::new(0, 0), cx)
6721 });
6722 let () = request_handled
6723 .next()
6724 .await
6725 .expect("All hover requests should have been triggered");
6726 assert_eq!(
6727 Vec::<String>::new(),
6728 hover_task
6729 .await
6730 .into_iter()
6731 .flatten()
6732 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6733 .sorted()
6734 .collect::<Vec<_>>(),
6735 "Empty hover parts should be ignored"
6736 );
6737}
6738
6739#[gpui::test]
6740async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6741 init_test(cx);
6742
6743 let fs = FakeFs::new(cx.executor());
6744 fs.insert_tree(
6745 path!("/dir"),
6746 json!({
6747 "a.ts": "a",
6748 }),
6749 )
6750 .await;
6751
6752 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6753
6754 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6755 language_registry.add(typescript_lang());
6756 let mut fake_language_servers = language_registry.register_fake_lsp(
6757 "TypeScript",
6758 FakeLspAdapter {
6759 capabilities: lsp::ServerCapabilities {
6760 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6761 ..lsp::ServerCapabilities::default()
6762 },
6763 ..FakeLspAdapter::default()
6764 },
6765 );
6766
6767 let (buffer, _handle) = project
6768 .update(cx, |p, cx| {
6769 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6770 })
6771 .await
6772 .unwrap();
6773 cx.executor().run_until_parked();
6774
6775 let fake_server = fake_language_servers
6776 .next()
6777 .await
6778 .expect("failed to get the language server");
6779
6780 let mut request_handled = fake_server
6781 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6782 Ok(Some(vec![
6783 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6784 title: "organize imports".to_string(),
6785 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6786 ..lsp::CodeAction::default()
6787 }),
6788 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6789 title: "fix code".to_string(),
6790 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6791 ..lsp::CodeAction::default()
6792 }),
6793 ]))
6794 });
6795
6796 let code_actions_task = project.update(cx, |project, cx| {
6797 project.code_actions(
6798 &buffer,
6799 0..buffer.read(cx).len(),
6800 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6801 cx,
6802 )
6803 });
6804
6805 let () = request_handled
6806 .next()
6807 .await
6808 .expect("The code action request should have been triggered");
6809
6810 let code_actions = code_actions_task.await.unwrap().unwrap();
6811 assert_eq!(code_actions.len(), 1);
6812 assert_eq!(
6813 code_actions[0].lsp_action.action_kind(),
6814 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6815 );
6816}
6817
6818#[gpui::test]
6819async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6820 init_test(cx);
6821
6822 let fs = FakeFs::new(cx.executor());
6823 fs.insert_tree(
6824 path!("/dir"),
6825 json!({
6826 "a.tsx": "a",
6827 }),
6828 )
6829 .await;
6830
6831 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6832
6833 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6834 language_registry.add(tsx_lang());
6835 let language_server_names = [
6836 "TypeScriptServer",
6837 "TailwindServer",
6838 "ESLintServer",
6839 "NoActionsCapabilitiesServer",
6840 ];
6841
6842 let mut language_server_rxs = [
6843 language_registry.register_fake_lsp(
6844 "tsx",
6845 FakeLspAdapter {
6846 name: language_server_names[0],
6847 capabilities: lsp::ServerCapabilities {
6848 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6849 ..lsp::ServerCapabilities::default()
6850 },
6851 ..FakeLspAdapter::default()
6852 },
6853 ),
6854 language_registry.register_fake_lsp(
6855 "tsx",
6856 FakeLspAdapter {
6857 name: language_server_names[1],
6858 capabilities: lsp::ServerCapabilities {
6859 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6860 ..lsp::ServerCapabilities::default()
6861 },
6862 ..FakeLspAdapter::default()
6863 },
6864 ),
6865 language_registry.register_fake_lsp(
6866 "tsx",
6867 FakeLspAdapter {
6868 name: language_server_names[2],
6869 capabilities: lsp::ServerCapabilities {
6870 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6871 ..lsp::ServerCapabilities::default()
6872 },
6873 ..FakeLspAdapter::default()
6874 },
6875 ),
6876 language_registry.register_fake_lsp(
6877 "tsx",
6878 FakeLspAdapter {
6879 name: language_server_names[3],
6880 capabilities: lsp::ServerCapabilities {
6881 code_action_provider: None,
6882 ..lsp::ServerCapabilities::default()
6883 },
6884 ..FakeLspAdapter::default()
6885 },
6886 ),
6887 ];
6888
6889 let (buffer, _handle) = project
6890 .update(cx, |p, cx| {
6891 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6892 })
6893 .await
6894 .unwrap();
6895 cx.executor().run_until_parked();
6896
6897 let mut servers_with_actions_requests = HashMap::default();
6898 for i in 0..language_server_names.len() {
6899 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6900 panic!(
6901 "Failed to get language server #{i} with name {}",
6902 &language_server_names[i]
6903 )
6904 });
6905 let new_server_name = new_server.server.name();
6906
6907 assert!(
6908 !servers_with_actions_requests.contains_key(&new_server_name),
6909 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6910 );
6911 match new_server_name.0.as_ref() {
6912 "TailwindServer" | "TypeScriptServer" => {
6913 servers_with_actions_requests.insert(
6914 new_server_name.clone(),
6915 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6916 move |_, _| {
6917 let name = new_server_name.clone();
6918 async move {
6919 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6920 lsp::CodeAction {
6921 title: format!("{name} code action"),
6922 ..lsp::CodeAction::default()
6923 },
6924 )]))
6925 }
6926 },
6927 ),
6928 );
6929 }
6930 "ESLintServer" => {
6931 servers_with_actions_requests.insert(
6932 new_server_name,
6933 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6934 |_, _| async move { Ok(None) },
6935 ),
6936 );
6937 }
6938 "NoActionsCapabilitiesServer" => {
6939 let _never_handled = new_server
6940 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6941 panic!(
6942 "Should not call for code actions server with no corresponding capabilities"
6943 )
6944 });
6945 }
6946 unexpected => panic!("Unexpected server name: {unexpected}"),
6947 }
6948 }
6949
6950 let code_actions_task = project.update(cx, |project, cx| {
6951 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6952 });
6953
6954 // cx.run_until_parked();
6955 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6956 |mut code_actions_request| async move {
6957 code_actions_request
6958 .next()
6959 .await
6960 .expect("All code actions requests should have been triggered")
6961 },
6962 ))
6963 .await;
6964 assert_eq!(
6965 vec!["TailwindServer code action", "TypeScriptServer code action"],
6966 code_actions_task
6967 .await
6968 .unwrap()
6969 .unwrap()
6970 .into_iter()
6971 .map(|code_action| code_action.lsp_action.title().to_owned())
6972 .sorted()
6973 .collect::<Vec<_>>(),
6974 "Should receive code actions responses from all related servers with hover capabilities"
6975 );
6976}
6977
6978#[gpui::test]
6979async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6980 init_test(cx);
6981
6982 let fs = FakeFs::new(cx.executor());
6983 fs.insert_tree(
6984 "/dir",
6985 json!({
6986 "a.rs": "let a = 1;",
6987 "b.rs": "let b = 2;",
6988 "c.rs": "let c = 2;",
6989 }),
6990 )
6991 .await;
6992
6993 let project = Project::test(
6994 fs,
6995 [
6996 "/dir/a.rs".as_ref(),
6997 "/dir/b.rs".as_ref(),
6998 "/dir/c.rs".as_ref(),
6999 ],
7000 cx,
7001 )
7002 .await;
7003
7004 // check the initial state and get the worktrees
7005 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
7006 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7007 assert_eq!(worktrees.len(), 3);
7008
7009 let worktree_a = worktrees[0].read(cx);
7010 let worktree_b = worktrees[1].read(cx);
7011 let worktree_c = worktrees[2].read(cx);
7012
7013 // check they start in the right order
7014 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
7015 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
7016 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
7017
7018 (
7019 worktrees[0].clone(),
7020 worktrees[1].clone(),
7021 worktrees[2].clone(),
7022 )
7023 });
7024
7025 // move first worktree to after the second
7026 // [a, b, c] -> [b, a, c]
7027 project
7028 .update(cx, |project, cx| {
7029 let first = worktree_a.read(cx);
7030 let second = worktree_b.read(cx);
7031 project.move_worktree(first.id(), second.id(), cx)
7032 })
7033 .expect("moving first after second");
7034
7035 // check the state after moving
7036 project.update(cx, |project, cx| {
7037 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7038 assert_eq!(worktrees.len(), 3);
7039
7040 let first = worktrees[0].read(cx);
7041 let second = worktrees[1].read(cx);
7042 let third = worktrees[2].read(cx);
7043
7044 // check they are now in the right order
7045 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7046 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
7047 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7048 });
7049
7050 // move the second worktree to before the first
7051 // [b, a, c] -> [a, b, c]
7052 project
7053 .update(cx, |project, cx| {
7054 let second = worktree_a.read(cx);
7055 let first = worktree_b.read(cx);
7056 project.move_worktree(first.id(), second.id(), cx)
7057 })
7058 .expect("moving second before first");
7059
7060 // check the state after moving
7061 project.update(cx, |project, cx| {
7062 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7063 assert_eq!(worktrees.len(), 3);
7064
7065 let first = worktrees[0].read(cx);
7066 let second = worktrees[1].read(cx);
7067 let third = worktrees[2].read(cx);
7068
7069 // check they are now in the right order
7070 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7071 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7072 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7073 });
7074
7075 // move the second worktree to after the third
7076 // [a, b, c] -> [a, c, b]
7077 project
7078 .update(cx, |project, cx| {
7079 let second = worktree_b.read(cx);
7080 let third = worktree_c.read(cx);
7081 project.move_worktree(second.id(), third.id(), cx)
7082 })
7083 .expect("moving second after third");
7084
7085 // check the state after moving
7086 project.update(cx, |project, cx| {
7087 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7088 assert_eq!(worktrees.len(), 3);
7089
7090 let first = worktrees[0].read(cx);
7091 let second = worktrees[1].read(cx);
7092 let third = worktrees[2].read(cx);
7093
7094 // check they are now in the right order
7095 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7096 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7097 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7098 });
7099
7100 // move the third worktree to before the second
7101 // [a, c, b] -> [a, b, c]
7102 project
7103 .update(cx, |project, cx| {
7104 let third = worktree_c.read(cx);
7105 let second = worktree_b.read(cx);
7106 project.move_worktree(third.id(), second.id(), cx)
7107 })
7108 .expect("moving third before second");
7109
7110 // check the state after moving
7111 project.update(cx, |project, cx| {
7112 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7113 assert_eq!(worktrees.len(), 3);
7114
7115 let first = worktrees[0].read(cx);
7116 let second = worktrees[1].read(cx);
7117 let third = worktrees[2].read(cx);
7118
7119 // check they are now in the right order
7120 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7121 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7122 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7123 });
7124
7125 // move the first worktree to after the third
7126 // [a, b, c] -> [b, c, a]
7127 project
7128 .update(cx, |project, cx| {
7129 let first = worktree_a.read(cx);
7130 let third = worktree_c.read(cx);
7131 project.move_worktree(first.id(), third.id(), cx)
7132 })
7133 .expect("moving first after third");
7134
7135 // check the state after moving
7136 project.update(cx, |project, cx| {
7137 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7138 assert_eq!(worktrees.len(), 3);
7139
7140 let first = worktrees[0].read(cx);
7141 let second = worktrees[1].read(cx);
7142 let third = worktrees[2].read(cx);
7143
7144 // check they are now in the right order
7145 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7146 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7147 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7148 });
7149
7150 // move the third worktree to before the first
7151 // [b, c, a] -> [a, b, c]
7152 project
7153 .update(cx, |project, cx| {
7154 let third = worktree_a.read(cx);
7155 let first = worktree_b.read(cx);
7156 project.move_worktree(third.id(), first.id(), cx)
7157 })
7158 .expect("moving third before first");
7159
7160 // check the state after moving
7161 project.update(cx, |project, cx| {
7162 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7163 assert_eq!(worktrees.len(), 3);
7164
7165 let first = worktrees[0].read(cx);
7166 let second = worktrees[1].read(cx);
7167 let third = worktrees[2].read(cx);
7168
7169 // check they are now in the right order
7170 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7171 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7172 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7173 });
7174}
7175
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an unstaged diff (working copy vs. git index) is computed
    // correctly, and that its hunks are recomputed when the index changes.
    init_test(cx);

    // Index (staged) version of the file: no comment line, prints "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment line and prints "goodbye world".
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Initially: one added hunk (the comment) and one modified hunk (the println).
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &unstaged_diff.base_text_string(cx).unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Update the index so it now contains the comment line but no println call.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index change, only the println line remains unstaged (an added hunk).
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff
                .snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &unstaged_diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7269
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Verifies that an uncommitted diff (working copy vs. HEAD) tracks changes
    // to both HEAD and the index, including the per-hunk "secondary" (staged)
    // status, and that deleted files produce a deletion hunk.
    init_test(cx);

    // HEAD: no comment line, prints "hello world".
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index: the println change is staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: adds a comment line on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, cx| {
        assert_eq!(diff.base_text(cx).language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment line is unstaged (has a secondary hunk); the println change
    // is already staged, so its secondary status is "none".
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text(cx).text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion hunk; it is still present in the
    // index, so the deletion is unstaged (has a secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is removed from the index, the deletion hunk is staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.snapshot(cx)
                .hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7453
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Verifies staging individual hunks of an uncommitted diff: the
    // optimistic "pending" state shown while the index write is in flight,
    // the events emitted along the way, rollback when the index write fails,
    // and multiple concurrent staging operations.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    // Pending: the index write has not completed yet.
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .snapshot(cx)
            .hunks_intersecting_range(range, &snapshot)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
        base_text_changed_range: _,
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7799
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Verifies that pending hunk-staging state stays consistent when the FS
    // events for earlier index writes arrive late: new staging operations can
    // be issued while previous writes have not yet been observed.
    // The seeds above are pinned (presumably a past regression — see test history).
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so index writes are not observed until flushed below.
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    // Still pending: the FS event has not been delivered yet.
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.snapshot(cx).hunks(&snapshot).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7993
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized test: repeatedly stage/unstage random hunks with random
    // yields in between, then verify every hunk settles into the final state
    // implied by the last operation applied to it.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines committed and indexed; every 5th line modified in the buffer,
    // yielding 6 modified hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of each hunk's expected secondary status.
    let mut hunks = uncommitted_diff.update(cx, |diff, cx| {
        diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
    });
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk: stage if it currently has a secondary hunk,
        // unstage otherwise, and record the expected pending state.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield randomly so operations interleave with background work.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After settling, every pending state resolves to its final value.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    // The real diff's hunk statuses must match the model.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .snapshot(cx)
            .hunks(&snapshot)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8118
8119#[gpui::test]
8120async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8121 init_test(cx);
8122
8123 let committed_contents = r#"
8124 fn main() {
8125 println!("hello from HEAD");
8126 }
8127 "#
8128 .unindent();
8129 let file_contents = r#"
8130 fn main() {
8131 println!("hello from the working copy");
8132 }
8133 "#
8134 .unindent();
8135
8136 let fs = FakeFs::new(cx.background_executor.clone());
8137 fs.insert_tree(
8138 "/dir",
8139 json!({
8140 ".git": {},
8141 "src": {
8142 "main.rs": file_contents,
8143 }
8144 }),
8145 )
8146 .await;
8147
8148 fs.set_head_for_repo(
8149 Path::new("/dir/.git"),
8150 &[("src/main.rs", committed_contents.clone())],
8151 "deadbeef",
8152 );
8153 fs.set_index_for_repo(
8154 Path::new("/dir/.git"),
8155 &[("src/main.rs", committed_contents.clone())],
8156 );
8157
8158 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8159
8160 let buffer = project
8161 .update(cx, |project, cx| {
8162 project.open_local_buffer("/dir/src/main.rs", cx)
8163 })
8164 .await
8165 .unwrap();
8166 let uncommitted_diff = project
8167 .update(cx, |project, cx| {
8168 project.open_uncommitted_diff(buffer.clone(), cx)
8169 })
8170 .await
8171 .unwrap();
8172
8173 cx.run_until_parked();
8174 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8175 let snapshot = buffer.read(cx).snapshot();
8176 assert_hunks(
8177 uncommitted_diff.snapshot(cx).hunks(&snapshot),
8178 &snapshot,
8179 &uncommitted_diff.base_text_string(cx).unwrap(),
8180 &[(
8181 1..2,
8182 " println!(\"hello from HEAD\");\n",
8183 " println!(\"hello from the working copy\");\n",
8184 DiffHunkStatus {
8185 kind: DiffHunkStatusKind::Modified,
8186 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8187 },
8188 )],
8189 );
8190 });
8191}
8192
// TODO: Should we test this on Windows also?
#[gpui::test]
#[cfg(not(windows))]
async fn test_staging_hunk_preserve_executable_permission(cx: &mut gpui::TestAppContext) {
    // Regression test: staging a hunk must write the index entry with the
    // file's current executable mode (100755), not reset it to 100644.
    use std::os::unix::fs::PermissionsExt;
    init_test(cx);
    // This test uses a real git repository and real subprocesses, so the
    // executor must be allowed to block.
    cx.executor().allow_parking();
    let committed_contents = "bar\n";
    let file_contents = "baz\n";
    let root = TempTree::new(json!({
        "project": {
            "foo": committed_contents
        },
    }));

    let work_dir = root.path().join("project");
    let file_path = work_dir.join("foo");
    let repo = git_init(work_dir.as_path());
    // Commit `foo` as an executable (mode 755), then modify its contents in
    // the working copy so there is a hunk to stage.
    let mut perms = std::fs::metadata(&file_path).unwrap().permissions();
    perms.set_mode(0o755);
    std::fs::set_permissions(&file_path, perms).unwrap();
    git_add("foo", &repo);
    git_commit("Initial commit", &repo);
    std::fs::write(&file_path, file_contents).unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(file_path.as_path(), cx)
        })
        .await
        .unwrap();

    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Stage every hunk of the modified file.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
    });

    cx.run_until_parked();

    // A mode change would show up in `git diff --staged` as "new mode 100644".
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["diff", "--staged"])
        .output()
        .await
        .unwrap();

    let staged_diff = String::from_utf8_lossy(&output.stdout);

    assert!(
        !staged_diff.contains("new mode 100644"),
        "Staging should not change file mode from 755 to 644.\ngit diff --staged:\n{}",
        staged_diff
    );

    // Double-check the raw index entry mode via `git ls-files -s`.
    let output = smol::process::Command::new("git")
        .current_dir(&work_dir)
        .args(["ls-files", "-s"])
        .output()
        .await
        .unwrap();
    let index_contents = String::from_utf8_lossy(&output.stdout);

    assert!(
        index_contents.contains("100755"),
        "Index should show file as executable (100755).\ngit ls-files -s:\n{}",
        index_contents
    );
}
8277
8278#[gpui::test]
8279async fn test_repository_and_path_for_project_path(
8280 background_executor: BackgroundExecutor,
8281 cx: &mut gpui::TestAppContext,
8282) {
8283 init_test(cx);
8284 let fs = FakeFs::new(background_executor);
8285 fs.insert_tree(
8286 path!("/root"),
8287 json!({
8288 "c.txt": "",
8289 "dir1": {
8290 ".git": {},
8291 "deps": {
8292 "dep1": {
8293 ".git": {},
8294 "src": {
8295 "a.txt": ""
8296 }
8297 }
8298 },
8299 "src": {
8300 "b.txt": ""
8301 }
8302 },
8303 }),
8304 )
8305 .await;
8306
8307 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8308 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8309 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8310 project
8311 .update(cx, |project, cx| project.git_scans_complete(cx))
8312 .await;
8313 cx.run_until_parked();
8314
8315 project.read_with(cx, |project, cx| {
8316 let git_store = project.git_store().read(cx);
8317 let pairs = [
8318 ("c.txt", None),
8319 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
8320 (
8321 "dir1/deps/dep1/src/a.txt",
8322 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
8323 ),
8324 ];
8325 let expected = pairs
8326 .iter()
8327 .map(|(path, result)| {
8328 (
8329 path,
8330 result.map(|(repo, repo_path)| {
8331 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
8332 }),
8333 )
8334 })
8335 .collect::<Vec<_>>();
8336 let actual = pairs
8337 .iter()
8338 .map(|(path, _)| {
8339 let project_path = (tree_id, rel_path(path)).into();
8340 let result = maybe!({
8341 let (repo, repo_path) =
8342 git_store.repository_and_path_for_project_path(&project_path, cx)?;
8343 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
8344 });
8345 (path, result)
8346 })
8347 .collect::<Vec<_>>();
8348 pretty_assertions::assert_eq!(expected, actual);
8349 });
8350
8351 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
8352 .await
8353 .unwrap();
8354 cx.run_until_parked();
8355
8356 project.read_with(cx, |project, cx| {
8357 let git_store = project.git_store().read(cx);
8358 assert_eq!(
8359 git_store.repository_and_path_for_project_path(
8360 &(tree_id, rel_path("dir1/src/b.txt")).into(),
8361 cx
8362 ),
8363 None
8364 );
8365 });
8366}
8367
8368#[gpui::test]
8369async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8370 init_test(cx);
8371 let fs = FakeFs::new(cx.background_executor.clone());
8372 let home = paths::home_dir();
8373 fs.insert_tree(
8374 home,
8375 json!({
8376 ".git": {},
8377 "project": {
8378 "a.txt": "A"
8379 },
8380 }),
8381 )
8382 .await;
8383
8384 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8385 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8386 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8387
8388 project
8389 .update(cx, |project, cx| project.git_scans_complete(cx))
8390 .await;
8391 tree.flush_fs_events(cx).await;
8392
8393 project.read_with(cx, |project, cx| {
8394 let containing = project
8395 .git_store()
8396 .read(cx)
8397 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8398 assert!(containing.is_none());
8399 });
8400
8401 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8402 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8403 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8404 project
8405 .update(cx, |project, cx| project.git_scans_complete(cx))
8406 .await;
8407 tree.flush_fs_events(cx).await;
8408
8409 project.read_with(cx, |project, cx| {
8410 let containing = project
8411 .git_store()
8412 .read(cx)
8413 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8414 assert_eq!(
8415 containing
8416 .unwrap()
8417 .0
8418 .read(cx)
8419 .work_directory_abs_path
8420 .as_ref(),
8421 home,
8422 );
8423 });
8424}
8425
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check of cached git statuses against a real repository:
    // initial scan, reaction to a file edit, and reaction to commits/deletes.
    init_test(cx);
    // Real git repository on disk — the executor must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the statuses annotated above: delete d.txt, modify a.txt
    // (b.txt was never added, so it stays untracked).
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modifying a previously-unchanged file should surface a new entry.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit all outstanding changes, then delete a tracked and an untracked
    // file from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8555
#[gpui::test]
// NOTE(review): reason for ignoring is not recorded here — TODO confirm.
#[ignore]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    // Checks two status post-processing rules: nested repositories are
    // excluded from the parent's statuses, and index-deleted-but-present
    // files get a combined index/worktree (`DA`) status.
    init_test(cx);
    // Real git repository on disk — the executor must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both `project` and `project/sub` are repositories; pick the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8619
8620#[track_caller]
8621/// We merge lhs into rhs.
8622fn merge_pending_ops_snapshots(
8623 source: Vec<pending_op::PendingOps>,
8624 mut target: Vec<pending_op::PendingOps>,
8625) -> Vec<pending_op::PendingOps> {
8626 for s_ops in source {
8627 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8628 if ops.repo_path == s_ops.repo_path {
8629 Some(idx)
8630 } else {
8631 None
8632 }
8633 }) {
8634 let t_ops = &mut target[idx];
8635 for s_op in s_ops.ops {
8636 if let Some(op_idx) = t_ops
8637 .ops
8638 .iter()
8639 .zip(0..)
8640 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8641 {
8642 let t_op = &mut t_ops.ops[op_idx];
8643 match (s_op.job_status, t_op.job_status) {
8644 (pending_op::JobStatus::Running, _) => {}
8645 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8646 (s_st, t_st) if s_st == t_st => {}
8647 _ => unreachable!(),
8648 }
8649 } else {
8650 t_ops.ops.push(s_op);
8651 }
8652 }
8653 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8654 } else {
8655 target.push(s_ops);
8656 }
8657 }
8658 target
8659}
8660
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Alternately stages and unstages a single untracked file, asserting
    // that each operation is observed first as Running and then Finished,
    // that op ids are assigned sequentially, and that the final cached
    // status reflects the last (staged) state.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Merge every PendingOpsChanged event into one snapshot so the complete
    // op history can be asserted after all operations finish.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops().next().is_none());
    });

    // Expected id of the next op; ids are asserted to increase from 1.
    let mut id = 1u16;

    // Stages (stage == true) or unstages `path`, asserting the op is
    // observed as Running before the task completes and Finished after.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The accumulated event history must show all five ops, in order, each
    // finished successfully.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last op staged the file, so it should now be index-added.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8821
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Issues two identical stage requests back-to-back without awaiting the
    // first, and asserts the first is recorded as Skipped (presumably
    // superseded by the second — see the expected ops below) while the
    // second runs to completion.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Merge every PendingOpsChanged event into one snapshot so the complete
    // op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First request: fire and forget.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second request for the same path: await with a timeout.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ends up staged exactly once.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8927
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Stages one file individually, then stage_all, then unstage_all, and
    // asserts the per-path op history: each path records exactly one Staged
    // and one Unstaged op, both Finished (the expected ops below imply
    // stage_all produced no extra op for the already-staged a.txt).
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Merge every PendingOpsChanged event into one snapshot so the complete
    // op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
9056
9057#[gpui::test]
9058async fn test_repository_subfolder_git_status(
9059 executor: gpui::BackgroundExecutor,
9060 cx: &mut gpui::TestAppContext,
9061) {
9062 init_test(cx);
9063
9064 let fs = FakeFs::new(executor);
9065 fs.insert_tree(
9066 path!("/root"),
9067 json!({
9068 "my-repo": {
9069 ".git": {},
9070 "a.txt": "a",
9071 "sub-folder-1": {
9072 "sub-folder-2": {
9073 "c.txt": "cc",
9074 "d": {
9075 "e.txt": "eee"
9076 }
9077 },
9078 }
9079 },
9080 }),
9081 )
9082 .await;
9083
9084 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
9085 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
9086
9087 fs.set_status_for_repo(
9088 path!("/root/my-repo/.git").as_ref(),
9089 &[(E_TXT, FileStatus::Untracked)],
9090 );
9091
9092 let project = Project::test(
9093 fs.clone(),
9094 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
9095 cx,
9096 )
9097 .await;
9098
9099 project
9100 .update(cx, |project, cx| project.git_scans_complete(cx))
9101 .await;
9102 cx.run_until_parked();
9103
9104 let repository = project.read_with(cx, |project, cx| {
9105 project.repositories(cx).values().next().unwrap().clone()
9106 });
9107
9108 // Ensure that the git status is loaded correctly
9109 repository.read_with(cx, |repository, _cx| {
9110 assert_eq!(
9111 repository.work_directory_abs_path,
9112 Path::new(path!("/root/my-repo")).into()
9113 );
9114
9115 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9116 assert_eq!(
9117 repository
9118 .status_for_path(&repo_path(E_TXT))
9119 .unwrap()
9120 .status,
9121 FileStatus::Untracked
9122 );
9123 });
9124
9125 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
9126 project
9127 .update(cx, |project, cx| project.git_scans_complete(cx))
9128 .await;
9129 cx.run_until_parked();
9130
9131 repository.read_with(cx, |repository, _cx| {
9132 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
9133 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
9134 });
9135}
9136
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Disabled via `#[cfg(any())]` (never compiled) until the flakiness is fixed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // Checks that a conflicted cherry-pick populates `merge_conflicts`, and
    // that resolving/committing clears it again.
    init_test(cx);
    // Real git repository on disk — the executor must be allowed to block.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create two divergent edits to a.txt on separate branches, then
    // cherry-pick one onto the other to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick concludes, the conflict set should be empty.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9219
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that rewriting .gitignore flips which entries are ignored,
    // and that a newly non-ignored file picks up its index status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index track .gitignore and a.xml; b.txt starts out ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Now a.xml is ignored, and b.txt shows up as Added (staged, unmodified).
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9287
// NOTE:
// This test always fails on Windows because, unlike on Unix, Windows does not allow
// renaming a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Verifies that when a repository's work directory is renamed on disk, the
// repository handle tracks the new path and per-file statuses survive the move.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses a real git repo on a real filesystem, so blocking is expected.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit "a" so modifying it later yields a Modified status; "b" is never
    // added, so it should stay Untracked throughout.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Sanity-check the initial state before the rename.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should now point at the new path, with statuses intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9369
// NOTE: This test always fails on Windows because, unlike on Unix, Windows does
// not allow renaming a directory that some program already has open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// End-to-end exercise of git status tracking against a real repository:
// startup statuses, worktree edits, commits, resets/stash, gitignore changes,
// and directory creation/renames should all be reflected in the repository's
// per-path statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git + real FS, so blocking operations are allowed.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the assertions below.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both start Untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files have no status entry at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files/directories and extend the ignore rules; the new rule also
    // ignores f.txt going forward.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new nested directory containing an untracked file.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked file's status should follow it
    // to the new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
9594
// Verifies that FS churn inside gitignored directories (e.g. a build tool
// writing temp files under `target/`) does not trigger repository update
// events, while still producing worktree entry updates for directories that
// were explicitly loaded.
#[gpui::test]
#[ignore]
async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git + real FS, so blocking operations are allowed.
    cx.executor().allow_parking();

    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "src": {
                "main.rs": "fn main() {}"
            },
            "target": {
                "debug": {
                    "important_text.txt": "important text",
                },
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add("src/main.rs", &repo);
    git_add(".gitignore", &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
    // Collect repository update events and worktree entry-change events so we
    // can assert on exactly what was emitted at each stage.
    let repository_updates = Arc::new(Mutex::new(Vec::new()));
    let project_events = Arc::new(Mutex::new(Vec::new()));
    project.update(cx, |project, cx| {
        let repo_events = repository_updates.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
                repo_events.lock().push(e.clone());
            }
        })
        .detach();
        let project_events = project_events.clone();
        cx.subscribe_self(move |_, e, _| {
            if let Event::WorktreeUpdatedEntries(_, updates) = e {
                project_events.lock().extend(
                    updates
                        .iter()
                        .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
                        // The test-harness sentinel file is noise; drop it.
                        .filter(|(path, _)| path != "fs-event-sentinel"),
                );
            }
        })
        .detach();
    });

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    // Explicitly load a file inside the ignored directory so `target/debug`
    // gets scanned and tracked despite being ignored.
    tree.update(cx, |tree, cx| {
        tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
    })
    .await
    .unwrap();
    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ]
        );
    });

    assert_eq!(
        repository_updates.lock().drain(..).collect::<Vec<_>>(),
        vec![
            RepositoryEvent::StatusesChanged,
            RepositoryEvent::MergeHeadsChanged,
        ],
        "Initial worktree scan should produce a repo update event"
    );
    assert_eq!(
        project_events.lock().drain(..).collect::<Vec<_>>(),
        vec![
            ("project/target".to_string(), PathChange::Loaded),
            ("project/target/debug".to_string(), PathChange::Loaded),
            (
                "project/target/debug/important_text.txt".to_string(),
                PathChange::Loaded
            ),
        ],
        "Initial project changes should show that all not-ignored and all opened files are loaded"
    );

    // Simulate build-tool churn: create a nested dir, write a temp file into
    // it, then remove the whole dir — all inside the ignored `target` tree.
    let deps_dir = work_dir.join("target").join("debug").join("deps");
    std::fs::create_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    std::fs::remove_dir_all(&deps_dir).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    tree.update(cx, |tree, _| {
        assert_eq!(
            tree.entries(true, 0)
                .map(|entry| (entry.path.as_ref(), entry.is_ignored))
                .collect::<Vec<_>>(),
            vec![
                (rel_path(""), false),
                (rel_path("project/"), false),
                (rel_path("project/.gitignore"), false),
                (rel_path("project/src"), false),
                (rel_path("project/src/main.rs"), false),
                (rel_path("project/target"), true),
                (rel_path("project/target/debug"), true),
                (rel_path("project/target/debug/important_text.txt"), true),
            ],
            "No stray temp files should be left after the flycheck changes"
        );
    });

    assert_eq!(
        repository_updates
            .lock()
            .iter()
            .cloned()
            .collect::<Vec<_>>(),
        Vec::new(),
        "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
    );
    assert_eq!(
        project_events.lock().as_slice(),
        vec![
            ("project/target/debug/deps".to_string(), PathChange::Added),
            ("project/target/debug/deps".to_string(), PathChange::Removed),
        ],
        "Due to `debug` directory being tracked, it should get updates for entries inside it.
        No updates for more nested directories should happen as those are ignored",
    );
}
9756
9757// todo(jk): turning this test off until we rework it in such a way so that it is not so susceptible
9758// to different timings/ordering of events.
9759#[ignore]
9760#[gpui::test]
9761async fn test_odd_events_for_ignored_dirs(
9762 executor: BackgroundExecutor,
9763 cx: &mut gpui::TestAppContext,
9764) {
9765 init_test(cx);
9766 let fs = FakeFs::new(executor);
9767 fs.insert_tree(
9768 path!("/root"),
9769 json!({
9770 ".git": {},
9771 ".gitignore": "**/target/",
9772 "src": {
9773 "main.rs": "fn main() {}",
9774 },
9775 "target": {
9776 "debug": {
9777 "foo.txt": "foo",
9778 "deps": {}
9779 }
9780 }
9781 }),
9782 )
9783 .await;
9784 fs.set_head_and_index_for_repo(
9785 path!("/root/.git").as_ref(),
9786 &[
9787 (".gitignore", "**/target/".into()),
9788 ("src/main.rs", "fn main() {}".into()),
9789 ],
9790 );
9791
9792 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9793 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9794 let project_events = Arc::new(Mutex::new(Vec::new()));
9795 project.update(cx, |project, cx| {
9796 let repository_updates = repository_updates.clone();
9797 cx.subscribe(project.git_store(), move |_, _, e, _| {
9798 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9799 repository_updates.lock().push(e.clone());
9800 }
9801 })
9802 .detach();
9803 let project_events = project_events.clone();
9804 cx.subscribe_self(move |_, e, _| {
9805 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9806 project_events.lock().extend(
9807 updates
9808 .iter()
9809 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9810 .filter(|(path, _)| path != "fs-event-sentinel"),
9811 );
9812 }
9813 })
9814 .detach();
9815 });
9816
9817 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9818 tree.update(cx, |tree, cx| {
9819 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9820 })
9821 .await
9822 .unwrap();
9823 tree.flush_fs_events(cx).await;
9824 project
9825 .update(cx, |project, cx| project.git_scans_complete(cx))
9826 .await;
9827 cx.run_until_parked();
9828 tree.update(cx, |tree, _| {
9829 assert_eq!(
9830 tree.entries(true, 0)
9831 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9832 .collect::<Vec<_>>(),
9833 vec![
9834 (rel_path(""), false),
9835 (rel_path(".gitignore"), false),
9836 (rel_path("src"), false),
9837 (rel_path("src/main.rs"), false),
9838 (rel_path("target"), true),
9839 (rel_path("target/debug"), true),
9840 (rel_path("target/debug/deps"), true),
9841 (rel_path("target/debug/foo.txt"), true),
9842 ]
9843 );
9844 });
9845
9846 assert_eq!(
9847 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9848 vec![
9849 RepositoryEvent::MergeHeadsChanged,
9850 RepositoryEvent::BranchChanged,
9851 RepositoryEvent::StatusesChanged,
9852 RepositoryEvent::StatusesChanged,
9853 ],
9854 "Initial worktree scan should produce a repo update event"
9855 );
9856 assert_eq!(
9857 project_events.lock().drain(..).collect::<Vec<_>>(),
9858 vec![
9859 ("target".to_string(), PathChange::Loaded),
9860 ("target/debug".to_string(), PathChange::Loaded),
9861 ("target/debug/deps".to_string(), PathChange::Loaded),
9862 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9863 ],
9864 "All non-ignored entries and all opened firs should be getting a project event",
9865 );
9866
9867 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9868 // This may happen multiple times during a single flycheck, but once is enough for testing.
9869 fs.emit_fs_event("/root/target/debug/deps", None);
9870 tree.flush_fs_events(cx).await;
9871 project
9872 .update(cx, |project, cx| project.git_scans_complete(cx))
9873 .await;
9874 cx.executor().run_until_parked();
9875
9876 assert_eq!(
9877 repository_updates
9878 .lock()
9879 .iter()
9880 .cloned()
9881 .collect::<Vec<_>>(),
9882 Vec::new(),
9883 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9884 );
9885 assert_eq!(
9886 project_events.lock().as_slice(),
9887 Vec::new(),
9888 "No further project events should happen, as only ignored dirs received FS events",
9889 );
9890}
9891
// Verifies that adding a non-visible (single-file) worktree does not cause the
// enclosing directory's repository to be picked up: only repositories from
// visible worktrees should be reported.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Two nested repos: /root/dir1/.git and /root/dir1/dep1/.git. Only dep1 is
    // opened as a visible worktree.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Initially only dep1's repository is known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open a single file from dir1 as an invisible worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: dir1's repo was not added.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9953
// Verifies git state after rescans under layered gitignores: an ancestor
// .gitignore outside the repo root, plus the repo's own .gitignore. New files
// should pick up the correct ignored/tracked state, and staged additions
// should be reported as Added.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file-scan exclusions so even the .git directory gets an entry
    // (asserted at the end of the test).
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's contents to be loaded so its entries can
    // be asserted on below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it, plus new files matching the
    // ancestor ignore and the repo ignore respectively.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // Staged new file shows as Added; the ancestor-ignored file is outside
        // the repo's ignore (note: not flagged ignored here), and the file in
        // ignored-dir is ignored.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
10094
// Verifies discovery and live refresh of repositories reached through a
// `gitdir:` file — both a linked git worktree (`.git/worktrees/...`) and a
// submodule (`.git/modules/...`) — alongside the main repository.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // `some-worktree/.git` and `subdir/some-submodule/.git` are files pointing
    // at git dirs inside the main repo's `.git` directory.
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories should be discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD/index say "b", but the file on disk is "B" — so it should
            // report as modified in the worktree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // Resolve the buffer to its repository and wait (via barrier) until that
    // repository has processed pending work.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10250
// Verifies that two project worktrees living inside the same git repository
// resolve to a single shared repository entry rather than duplicates.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo root as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the shared parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10297
// Verifies that saving a buffer under a new path (save-as) re-resolves its git
// diff bases: the unstaged diff should switch to the new path's index content,
// and the uncommitted diff to the new path's HEAD content.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for each (file, stage) pair so assertions below can
    // tell exactly which base text the diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Replace the whole buffer so it differs from every base text.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.snapshot(cx).hunks(&snapshot).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // The uncommitted diff, opened after the save, should also compare against
    // the new path's committed content.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let base_text = uncommitted_diff.base_text_string(cx).unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10411
10412async fn search(
10413 project: &Entity<Project>,
10414 query: SearchQuery,
10415 cx: &mut gpui::TestAppContext,
10416) -> Result<HashMap<String, Vec<Range<usize>>>> {
10417 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10418 let mut results = HashMap::default();
10419 while let Ok(search_result) = search_rx.recv().await {
10420 match search_result {
10421 SearchResult::Buffer { buffer, ranges } => {
10422 results.entry(buffer).or_insert(ranges);
10423 }
10424 SearchResult::LimitReached => {}
10425 }
10426 }
10427 Ok(results
10428 .into_iter()
10429 .map(|(buffer, ranges)| {
10430 buffer.update(cx, |buffer, cx| {
10431 let path = buffer
10432 .file()
10433 .unwrap()
10434 .full_path(cx)
10435 .to_string_lossy()
10436 .to_string();
10437 let ranges = ranges
10438 .into_iter()
10439 .map(|range| range.to_offset(buffer))
10440 .collect::<Vec<_>>();
10441 (path, ranges)
10442 })
10443 })
10444 .collect())
10445}
10446
10447pub fn init_test(cx: &mut gpui::TestAppContext) {
10448 zlog::init_test();
10449
10450 cx.update(|cx| {
10451 let settings_store = SettingsStore::test(cx);
10452 cx.set_global(settings_store);
10453 release_channel::init(semver::Version::new(0, 0, 0), cx);
10454 });
10455}
10456
10457fn json_lang() -> Arc<Language> {
10458 Arc::new(Language::new(
10459 LanguageConfig {
10460 name: "JSON".into(),
10461 matcher: LanguageMatcher {
10462 path_suffixes: vec!["json".to_string()],
10463 ..Default::default()
10464 },
10465 ..Default::default()
10466 },
10467 None,
10468 ))
10469}
10470
10471fn js_lang() -> Arc<Language> {
10472 Arc::new(Language::new(
10473 LanguageConfig {
10474 name: "JavaScript".into(),
10475 matcher: LanguageMatcher {
10476 path_suffixes: vec!["js".to_string()],
10477 ..Default::default()
10478 },
10479 ..Default::default()
10480 },
10481 None,
10482 ))
10483}
10484
/// Builds a grammarless "Python" language (matching `py` files) whose
/// toolchain lister reports a fake virtual environment for every `.venv`
/// directory found among the ancestors of the queried path on the given
/// fake filesystem. Lets tests exercise toolchain discovery without a real
/// Python interpreter.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister returns one toolchain for each `.venv` directory
            // found within the ancestors of `subroot_relative_path`.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution of an explicit toolchain path is unused by these tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // The fake toolchains need no shell activation commands.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
10554
10555fn typescript_lang() -> Arc<Language> {
10556 Arc::new(Language::new(
10557 LanguageConfig {
10558 name: "TypeScript".into(),
10559 matcher: LanguageMatcher {
10560 path_suffixes: vec!["ts".to_string()],
10561 ..Default::default()
10562 },
10563 ..Default::default()
10564 },
10565 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10566 ))
10567}
10568
10569fn tsx_lang() -> Arc<Language> {
10570 Arc::new(Language::new(
10571 LanguageConfig {
10572 name: "tsx".into(),
10573 matcher: LanguageMatcher {
10574 path_suffixes: vec!["tsx".to_string()],
10575 ..Default::default()
10576 },
10577 ..Default::default()
10578 },
10579 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10580 ))
10581}
10582
10583fn get_all_tasks(
10584 project: &Entity<Project>,
10585 task_contexts: Arc<TaskContexts>,
10586 cx: &mut App,
10587) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10588 let new_tasks = project.update(cx, |project, cx| {
10589 project.task_store.update(cx, |task_store, cx| {
10590 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10591 this.used_and_current_resolved_tasks(task_contexts, cx)
10592 })
10593 })
10594 });
10595
10596 cx.background_spawn(async move {
10597 let (mut old, new) = new_tasks.await;
10598 old.extend(new);
10599 old
10600 })
10601}
10602
/// Asserts that `path` exists in `tree` with the expected git state:
/// `index_status` is `Some(code)` when the entry should be tracked with that
/// index status (and an unmodified worktree), or `None` when it should carry
/// no git status at all; `is_ignored` is the expected gitignore state.
#[track_caller]
fn assert_entry_git_state(
    tree: &Worktree,
    repository: &Repository,
    path: &str,
    index_status: Option<StatusCode>,
    is_ignored: bool,
) {
    // Sanity-check that the repository actually corresponds to this worktree.
    assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
    let entry = tree
        .entry_for_path(&rel_path(path))
        .unwrap_or_else(|| panic!("entry {path} not found"));
    let status = repository
        .status_for_path(&repo_path(path))
        .map(|entry| entry.status);
    // Expected status is always "modified in the index only": the worktree
    // side is pinned to Unmodified.
    let expected = index_status.map(|index_status| {
        TrackedStatus {
            index_status,
            worktree_status: StatusCode::Unmodified,
        }
        .into()
    });
    assert_eq!(
        status, expected,
        "expected {path} to have git status: {expected:?}"
    );
    assert_eq!(
        entry.is_ignored, is_ignored,
        "expected {path} to have is_ignored: {is_ignored}"
    );
}
10634
10635#[track_caller]
10636fn git_init(path: &Path) -> git2::Repository {
10637 let mut init_opts = RepositoryInitOptions::new();
10638 init_opts.initial_head("main");
10639 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10640}
10641
10642#[track_caller]
10643fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10644 let path = path.as_ref();
10645 let mut index = repo.index().expect("Failed to get index");
10646 index.add_path(path).expect("Failed to add file");
10647 index.write().expect("Failed to write index");
10648}
10649
10650#[track_caller]
10651fn git_remove_index(path: &Path, repo: &git2::Repository) {
10652 let mut index = repo.index().expect("Failed to get index");
10653 index.remove_path(path).expect("Failed to add file");
10654 index.write().expect("Failed to write index");
10655}
10656
10657#[track_caller]
10658fn git_commit(msg: &'static str, repo: &git2::Repository) {
10659 use git2::Signature;
10660
10661 let signature = Signature::now("test", "test@zed.dev").unwrap();
10662 let oid = repo.index().unwrap().write_tree().unwrap();
10663 let tree = repo.find_tree(oid).unwrap();
10664 if let Ok(head) = repo.head() {
10665 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10666
10667 let parent_commit = parent_obj.as_commit().unwrap();
10668
10669 repo.commit(
10670 Some("HEAD"),
10671 &signature,
10672 &signature,
10673 msg,
10674 &tree,
10675 &[parent_commit],
10676 )
10677 .expect("Failed to commit with parent");
10678 } else {
10679 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10680 .expect("Failed to commit");
10681 }
10682}
10683
/// Cherry-picks `commit` onto the current HEAD.
/// NOTE: compiled out via `#[cfg(any())]`; kept for ad-hoc use when
/// debugging these tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
10689
10690#[track_caller]
10691fn git_stash(repo: &mut git2::Repository) {
10692 use git2::Signature;
10693
10694 let signature = Signature::now("test", "test@zed.dev").unwrap();
10695 repo.stash_save(&signature, "N/A", None)
10696 .expect("Failed to stash");
10697}
10698
10699#[track_caller]
10700fn git_reset(offset: usize, repo: &git2::Repository) {
10701 let head = repo.head().expect("Couldn't get repo head");
10702 let object = head.peel(git2::ObjectType::Commit).unwrap();
10703 let commit = object.as_commit().unwrap();
10704 let new_head = commit
10705 .parents()
10706 .inspect(|parnet| {
10707 parnet.message();
10708 })
10709 .nth(offset)
10710 .expect("Not enough history");
10711 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10712 .expect("Could not reset");
10713}
10714
/// Creates branch `name` pointing at the current HEAD commit.
/// NOTE: compiled out via `#[cfg(any())]`; kept for ad-hoc use when
/// debugging these tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously said "Failed to commit", copied
    // from `git_commit`; this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10725
/// Points HEAD at the ref named `name` and checks out its tree.
/// NOTE: compiled out via `#[cfg(any())]`; kept for ad-hoc use when
/// debugging these tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10732
/// Collects the repository's current statuses into a map from file path to
/// its raw `git2::Status` flags.
/// NOTE: compiled out via `#[cfg(any())]`; kept for ad-hoc use when
/// debugging these tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
10742
/// `find_project_path` should resolve absolute paths to the correct
/// worktree-relative `ProjectPath` — including nonexistent paths that fall
/// inside a worktree — and return `None` for paths outside every worktree.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees, so resolution must pick the right one.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Top-level file in the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested file in the first worktree.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // File belonging to the second worktree.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A nonexistent file still resolves as long as it lies inside a
        // worktree (e.g. for save-as targets).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
10826
/// Removing worktrees should drop only the repositories that become
/// unreachable, and the active repository should fall back to one from a
/// remaining worktree (or `None` once all worktrees are gone).
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    // Note: `/root/b/script` is a worktree nested inside repository `b`, so
    // repo `b` is reachable through two worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing `b/script` must not drop repository `b`: the `/root/b`
    // worktree still contains it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree `a` should switch the active repository to `b`.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With the last worktree gone, no repository should remain active.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}
10939
/// While a stage operation is in flight, the affected hunk should be
/// optimistically marked `SecondaryHunkRemovalPending` before the filesystem
/// reports the new index state, then settle to `NoSecondaryHunk` once
/// staging completes, and disappear entirely after HEAD catches up.
#[gpui::test]
async fn test_optimistic_hunks_in_staged_files(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        one
        two
        three
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index agree; only the working copy differs (line 2).
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunk is initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(HasSecondaryHunk),
            )],
        );
    });

    // Get the repository handle.
    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage the file.
    let stage_task = repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("file.txt")], cx)
    });

    // Run a few ticks to let the job start and mark hunks as pending,
    // but don't run_until_parked which would complete the entire operation.
    for _ in 0..10 {
        cx.executor().tick();
        let [hunk]: [_; 1] = uncommitted_diff
            .read_with(cx, |diff, cx| {
                diff.snapshot(cx).hunks(&snapshot).collect::<Vec<_>>()
            })
            .try_into()
            .unwrap();
        // The hunk may still be unstaged for the first few ticks; it must
        // become pending (never fully staged) before the operation finishes.
        match hunk.secondary_status {
            HasSecondaryHunk => {}
            SecondaryHunkRemovalPending => break,
            NoSecondaryHunk => panic!("hunk was not optimistically staged"),
            _ => panic!("unexpected hunk state"),
        }
    }
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(SecondaryHunkRemovalPending),
            )],
        );
    });

    // Let the staging complete.
    stage_task.await.unwrap();
    cx.run_until_parked();

    // The hunk is now fully staged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[(
                1..2,
                "two\n",
                "TWO\n",
                DiffHunkStatus::modified(NoSecondaryHunk),
            )],
        );
    });

    // Simulate a commit by updating HEAD to match the current file contents.
    // The FakeGitRepository's commit method is a no-op, so we need to manually
    // update HEAD to simulate the commit completing.
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", file_contents.clone())],
        "newhead",
    );
    cx.run_until_parked();

    // After committing, there are no more hunks.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.snapshot(cx).hunks(&snapshot),
            &snapshot,
            &diff.base_text_string(cx).unwrap(),
            &[] as &[(Range<u32>, &str, &str, DiffHunkStatus)],
        );
    });
}