1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, creating them requires the user
// to opt in, so we assume that they are not supported out of the box.
101#[cfg(not(windows))]
102#[gpui::test]
103async fn test_symlinks(cx: &mut gpui::TestAppContext) {
104 init_test(cx);
105 cx.executor().allow_parking();
106
107 let dir = TempTree::new(json!({
108 "root": {
109 "apple": "",
110 "banana": {
111 "carrot": {
112 "date": "",
113 "endive": "",
114 }
115 },
116 "fennel": {
117 "grape": "",
118 }
119 }
120 }));
121
122 let root_link_path = dir.path().join("root_link");
123 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
124 os::unix::fs::symlink(
125 dir.path().join("root/fennel"),
126 dir.path().join("root/finnochio"),
127 )
128 .unwrap();
129
130 let project = Project::test(
131 Arc::new(RealFs::new(None, cx.executor())),
132 [root_link_path.as_ref()],
133 cx,
134 )
135 .await;
136
137 project.update(cx, |project, cx| {
138 let tree = project.worktrees(cx).next().unwrap().read(cx);
139 assert_eq!(tree.file_count(), 5);
140 assert_eq!(
141 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
142 tree.entry_for_path(rel_path("finnochio/grape"))
143 .unwrap()
144 .inode
145 );
146 });
147}
148
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout under test:
    // - a root `.editorconfig` (root = true) with per-glob sections for *.rs and *.js,
    // - `.zed/settings.json` providing the editor-level defaults,
    // - a nested `b/.editorconfig` that partially overrides the root one,
    // - files in several languages to exercise glob matching.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the on-disk temp tree into a FakeFs so the project watches the
    // fake copy rather than the real filesystem.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings loading settle before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
247
248#[gpui::test]
249async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
250 init_test(cx);
251 cx.update(|cx| {
252 GitHostingProviderRegistry::default_global(cx);
253 git_hosting_providers::init(cx);
254 });
255
256 let fs = FakeFs::new(cx.executor());
257 let str_path = path!("/dir");
258 let path = Path::new(str_path);
259
260 fs.insert_tree(
261 path!("/dir"),
262 json!({
263 ".zed": {
264 "settings.json": r#"{
265 "git_hosting_providers": [
266 {
267 "provider": "gitlab",
268 "base_url": "https://google.com",
269 "name": "foo"
270 }
271 ]
272 }"#
273 },
274 }),
275 )
276 .await;
277
278 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
279 let (_worktree, _) =
280 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
281 cx.executor().run_until_parked();
282
283 cx.update(|cx| {
284 let provider = GitHostingProviderRegistry::global(cx);
285 assert!(
286 provider
287 .list_hosting_providers()
288 .into_iter()
289 .any(|provider| provider.name() == "foo")
290 );
291 });
292
293 fs.atomic_write(
294 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
295 "{}".into(),
296 )
297 .await
298 .unwrap();
299
300 cx.run_until_parked();
301
302 cx.update(|cx| {
303 let provider = GitHostingProviderRegistry::global(cx);
304 assert!(
305 !provider
306 .list_hosting_providers()
307 .into_iter()
308 .any(|provider| provider.name() == "foo")
309 );
310 });
311}
312
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Worktree layout: a root `.zed` directory (settings + tasks) and a nested
    // `b/.zed` directory whose settings/tasks should shadow the root ones for
    // files under `b/`.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve all tasks against the (only) worktree's default context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind describing the root `.zed/tasks.json` file.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            // Settings first: `a/a.rs` gets the root tab_size, `b/b.rs` the
            // nested override.
            let tree = worktree.read(cx);

            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        // Flatten to (kind, label, args, env) for easy comparison.
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most-recently scheduled and install a global
    // (file-based) task on top.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled root task now sorts first; the newly added
    // global task (with its env) appears last.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // A single worktree task whose command expands $ZED_WORKTREE_ROOT.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With only an active-item context and no worktree context, the task's
    // ZED_WORKTREE_ROOT variable cannot be substituted, so nothing resolves.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the worktree context supplies WorktreeRoot, the task resolves and
    // the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a project root is the nearest ancestor
    // directory (within `depth`) that contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for the manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects inside one worktree, each with its own manifest
    // and `.venv` directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py": "",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    // Register a fake "ty" server so we can observe per-root server instances.
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain is active until one is explicitly chosen.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After the toolchain switch, project-b's buffer is served by a fresh
    // server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let settings_json_contents = json!({
1216 "languages": {
1217 "Rust": {
1218 "language_servers": ["my_fake_lsp", "lsp_on_path"]
1219 }
1220 },
1221 "lsp": {
1222 "my_fake_lsp": {
1223 "binary": {
1224 // file exists, so this is treated as a relative path
1225 "path": path!(".relative_path/to/my_fake_lsp_binary.exe").to_string(),
1226 }
1227 },
1228 "lsp_on_path": {
1229 "binary": {
1230 // file doesn't exist, so it will fall back on PATH env var
1231 "path": path!("lsp_on_path.exe").to_string(),
1232 }
1233 }
1234 },
1235 });
1236
1237 let fs = FakeFs::new(cx.executor());
1238 fs.insert_tree(
1239 path!("/the-root"),
1240 json!({
1241 ".zed": {
1242 "settings.json": settings_json_contents.to_string(),
1243 },
1244 ".relative_path": {
1245 "to": {
1246 "my_fake_lsp.exe": "",
1247 },
1248 },
1249 "src": {
1250 "main.rs": "",
1251 }
1252 }),
1253 )
1254 .await;
1255
1256 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1257 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1258 language_registry.add(rust_lang());
1259
1260 let mut my_fake_lsp = language_registry.register_fake_lsp(
1261 "Rust",
1262 FakeLspAdapter {
1263 name: "my_fake_lsp",
1264 ..Default::default()
1265 },
1266 );
1267 let mut lsp_on_path = language_registry.register_fake_lsp(
1268 "Rust",
1269 FakeLspAdapter {
1270 name: "lsp_on_path",
1271 ..Default::default()
1272 },
1273 );
1274
1275 cx.run_until_parked();
1276
1277 // Start the language server by opening a buffer with a compatible file extension.
1278 project
1279 .update(cx, |project, cx| {
1280 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1281 })
1282 .await
1283 .unwrap();
1284
1285 let lsp_path = my_fake_lsp.next().await.unwrap().binary.path;
1286 assert_eq!(
1287 lsp_path.to_string_lossy(),
1288 path!("/the-root/.relative_path/to/my_fake_lsp_binary.exe"),
1289 );
1290
1291 let lsp_path = lsp_on_path.next().await.unwrap().binary.path;
1292 assert_eq!(lsp_path.to_string_lossy(), path!("lsp_on_path.exe"));
1293}
1294
1295#[gpui::test]
1296async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1297 init_test(cx);
1298
1299 let fs = FakeFs::new(cx.executor());
1300 fs.insert_tree(
1301 path!("/the-root"),
1302 json!({
1303 ".gitignore": "target\n",
1304 "Cargo.lock": "",
1305 "src": {
1306 "a.rs": "",
1307 "b.rs": "",
1308 },
1309 "target": {
1310 "x": {
1311 "out": {
1312 "x.rs": ""
1313 }
1314 },
1315 "y": {
1316 "out": {
1317 "y.rs": "",
1318 }
1319 },
1320 "z": {
1321 "out": {
1322 "z.rs": ""
1323 }
1324 }
1325 }
1326 }),
1327 )
1328 .await;
1329 fs.insert_tree(
1330 path!("/the-registry"),
1331 json!({
1332 "dep1": {
1333 "src": {
1334 "dep1.rs": "",
1335 }
1336 },
1337 "dep2": {
1338 "src": {
1339 "dep2.rs": "",
1340 }
1341 },
1342 }),
1343 )
1344 .await;
1345 fs.insert_tree(
1346 path!("/the/stdlib"),
1347 json!({
1348 "LICENSE": "",
1349 "src": {
1350 "string.rs": "",
1351 }
1352 }),
1353 )
1354 .await;
1355
1356 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1357 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1358 (project.languages().clone(), project.lsp_store())
1359 });
1360 language_registry.add(rust_lang());
1361 let mut fake_servers = language_registry.register_fake_lsp(
1362 "Rust",
1363 FakeLspAdapter {
1364 name: "the-language-server",
1365 ..Default::default()
1366 },
1367 );
1368
1369 cx.executor().run_until_parked();
1370
1371 // Start the language server by opening a buffer with a compatible file extension.
1372 project
1373 .update(cx, |project, cx| {
1374 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1375 })
1376 .await
1377 .unwrap();
1378
1379 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1380 project.update(cx, |project, cx| {
1381 let worktree = project.worktrees(cx).next().unwrap();
1382 assert_eq!(
1383 worktree
1384 .read(cx)
1385 .snapshot()
1386 .entries(true, 0)
1387 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1388 .collect::<Vec<_>>(),
1389 &[
1390 ("", false),
1391 (".gitignore", false),
1392 ("Cargo.lock", false),
1393 ("src", false),
1394 ("src/a.rs", false),
1395 ("src/b.rs", false),
1396 ("target", true),
1397 ]
1398 );
1399 });
1400
1401 let prev_read_dir_count = fs.read_dir_call_count();
1402
1403 let fake_server = fake_servers.next().await.unwrap();
1404 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1405 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1406 id
1407 });
1408
1409 // Simulate jumping to a definition in a dependency outside of the worktree.
1410 let _out_of_worktree_buffer = project
1411 .update(cx, |project, cx| {
1412 project.open_local_buffer_via_lsp(
1413 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1414 server_id,
1415 cx,
1416 )
1417 })
1418 .await
1419 .unwrap();
1420
1421 // Keep track of the FS events reported to the language server.
1422 let file_changes = Arc::new(Mutex::new(Vec::new()));
1423 fake_server
1424 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1425 registrations: vec![lsp::Registration {
1426 id: Default::default(),
1427 method: "workspace/didChangeWatchedFiles".to_string(),
1428 register_options: serde_json::to_value(
1429 lsp::DidChangeWatchedFilesRegistrationOptions {
1430 watchers: vec![
1431 lsp::FileSystemWatcher {
1432 glob_pattern: lsp::GlobPattern::String(
1433 path!("/the-root/Cargo.toml").to_string(),
1434 ),
1435 kind: None,
1436 },
1437 lsp::FileSystemWatcher {
1438 glob_pattern: lsp::GlobPattern::String(
1439 path!("/the-root/src/*.{rs,c}").to_string(),
1440 ),
1441 kind: None,
1442 },
1443 lsp::FileSystemWatcher {
1444 glob_pattern: lsp::GlobPattern::String(
1445 path!("/the-root/target/y/**/*.rs").to_string(),
1446 ),
1447 kind: None,
1448 },
1449 lsp::FileSystemWatcher {
1450 glob_pattern: lsp::GlobPattern::String(
1451 path!("/the/stdlib/src/**/*.rs").to_string(),
1452 ),
1453 kind: None,
1454 },
1455 lsp::FileSystemWatcher {
1456 glob_pattern: lsp::GlobPattern::String(
1457 path!("**/Cargo.lock").to_string(),
1458 ),
1459 kind: None,
1460 },
1461 ],
1462 },
1463 )
1464 .ok(),
1465 }],
1466 })
1467 .await
1468 .into_response()
1469 .unwrap();
1470 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1471 let file_changes = file_changes.clone();
1472 move |params, _| {
1473 let mut file_changes = file_changes.lock();
1474 file_changes.extend(params.changes);
1475 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1476 }
1477 });
1478
1479 cx.executor().run_until_parked();
1480 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1481 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1482
1483 let mut new_watched_paths = fs.watched_paths();
1484 new_watched_paths.retain(|path| {
1485 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1486 });
1487 assert_eq!(
1488 &new_watched_paths,
1489 &[
1490 Path::new(path!("/the-root")),
1491 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1492 Path::new(path!("/the/stdlib/src"))
1493 ]
1494 );
1495
1496 // Now the language server has asked us to watch an ignored directory path,
1497 // so we recursively load it.
1498 project.update(cx, |project, cx| {
1499 let worktree = project.visible_worktrees(cx).next().unwrap();
1500 assert_eq!(
1501 worktree
1502 .read(cx)
1503 .snapshot()
1504 .entries(true, 0)
1505 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1506 .collect::<Vec<_>>(),
1507 &[
1508 ("", false),
1509 (".gitignore", false),
1510 ("Cargo.lock", false),
1511 ("src", false),
1512 ("src/a.rs", false),
1513 ("src/b.rs", false),
1514 ("target", true),
1515 ("target/x", true),
1516 ("target/y", true),
1517 ("target/y/out", true),
1518 ("target/y/out/y.rs", true),
1519 ("target/z", true),
1520 ]
1521 );
1522 });
1523
1524 // Perform some file system mutations, two of which match the watched patterns,
1525 // and one of which does not.
1526 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1527 .await
1528 .unwrap();
1529 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1530 .await
1531 .unwrap();
1532 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1533 .await
1534 .unwrap();
1535 fs.create_file(
1536 path!("/the-root/target/x/out/x2.rs").as_ref(),
1537 Default::default(),
1538 )
1539 .await
1540 .unwrap();
1541 fs.create_file(
1542 path!("/the-root/target/y/out/y2.rs").as_ref(),
1543 Default::default(),
1544 )
1545 .await
1546 .unwrap();
1547 fs.save(
1548 path!("/the-root/Cargo.lock").as_ref(),
1549 &"".into(),
1550 Default::default(),
1551 )
1552 .await
1553 .unwrap();
1554 fs.save(
1555 path!("/the-stdlib/LICENSE").as_ref(),
1556 &"".into(),
1557 Default::default(),
1558 )
1559 .await
1560 .unwrap();
1561 fs.save(
1562 path!("/the/stdlib/src/string.rs").as_ref(),
1563 &"".into(),
1564 Default::default(),
1565 )
1566 .await
1567 .unwrap();
1568
1569 // The language server receives events for the FS mutations that match its watch patterns.
1570 cx.executor().run_until_parked();
1571 assert_eq!(
1572 &*file_changes.lock(),
1573 &[
1574 lsp::FileEvent {
1575 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1576 typ: lsp::FileChangeType::CHANGED,
1577 },
1578 lsp::FileEvent {
1579 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1580 typ: lsp::FileChangeType::DELETED,
1581 },
1582 lsp::FileEvent {
1583 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1584 typ: lsp::FileChangeType::CREATED,
1585 },
1586 lsp::FileEvent {
1587 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1588 typ: lsp::FileChangeType::CREATED,
1589 },
1590 lsp::FileEvent {
1591 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1592 typ: lsp::FileChangeType::CHANGED,
1593 },
1594 ]
1595 );
1596}
1597
1598#[gpui::test]
1599async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1600 init_test(cx);
1601
1602 let fs = FakeFs::new(cx.executor());
1603 fs.insert_tree(
1604 path!("/dir"),
1605 json!({
1606 "a.rs": "let a = 1;",
1607 "b.rs": "let b = 2;"
1608 }),
1609 )
1610 .await;
1611
1612 let project = Project::test(
1613 fs,
1614 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1615 cx,
1616 )
1617 .await;
1618 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1619
1620 let buffer_a = project
1621 .update(cx, |project, cx| {
1622 project.open_local_buffer(path!("/dir/a.rs"), cx)
1623 })
1624 .await
1625 .unwrap();
1626 let buffer_b = project
1627 .update(cx, |project, cx| {
1628 project.open_local_buffer(path!("/dir/b.rs"), cx)
1629 })
1630 .await
1631 .unwrap();
1632
1633 lsp_store.update(cx, |lsp_store, cx| {
1634 lsp_store
1635 .update_diagnostics(
1636 LanguageServerId(0),
1637 lsp::PublishDiagnosticsParams {
1638 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1639 version: None,
1640 diagnostics: vec![lsp::Diagnostic {
1641 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1642 severity: Some(lsp::DiagnosticSeverity::ERROR),
1643 message: "error 1".to_string(),
1644 ..Default::default()
1645 }],
1646 },
1647 None,
1648 DiagnosticSourceKind::Pushed,
1649 &[],
1650 cx,
1651 )
1652 .unwrap();
1653 lsp_store
1654 .update_diagnostics(
1655 LanguageServerId(0),
1656 lsp::PublishDiagnosticsParams {
1657 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1658 version: None,
1659 diagnostics: vec![lsp::Diagnostic {
1660 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1661 severity: Some(DiagnosticSeverity::WARNING),
1662 message: "error 2".to_string(),
1663 ..Default::default()
1664 }],
1665 },
1666 None,
1667 DiagnosticSourceKind::Pushed,
1668 &[],
1669 cx,
1670 )
1671 .unwrap();
1672 });
1673
1674 buffer_a.update(cx, |buffer, _| {
1675 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1676 assert_eq!(
1677 chunks
1678 .iter()
1679 .map(|(s, d)| (s.as_str(), *d))
1680 .collect::<Vec<_>>(),
1681 &[
1682 ("let ", None),
1683 ("a", Some(DiagnosticSeverity::ERROR)),
1684 (" = 1;", None),
1685 ]
1686 );
1687 });
1688 buffer_b.update(cx, |buffer, _| {
1689 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1690 assert_eq!(
1691 chunks
1692 .iter()
1693 .map(|(s, d)| (s.as_str(), *d))
1694 .collect::<Vec<_>>(),
1695 &[
1696 ("let ", None),
1697 ("b", Some(DiagnosticSeverity::WARNING)),
1698 (" = 2;", None),
1699 ]
1700 );
1701 });
1702}
1703
1704#[gpui::test]
1705async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1706 init_test(cx);
1707
1708 let fs = FakeFs::new(cx.executor());
1709 fs.insert_tree(
1710 path!("/root"),
1711 json!({
1712 "dir": {
1713 ".git": {
1714 "HEAD": "ref: refs/heads/main",
1715 },
1716 ".gitignore": "b.rs",
1717 "a.rs": "let a = 1;",
1718 "b.rs": "let b = 2;",
1719 },
1720 "other.rs": "let b = c;"
1721 }),
1722 )
1723 .await;
1724
1725 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1726 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1727 let (worktree, _) = project
1728 .update(cx, |project, cx| {
1729 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1730 })
1731 .await
1732 .unwrap();
1733 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1734
1735 let (worktree, _) = project
1736 .update(cx, |project, cx| {
1737 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1738 })
1739 .await
1740 .unwrap();
1741 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1742
1743 let server_id = LanguageServerId(0);
1744 lsp_store.update(cx, |lsp_store, cx| {
1745 lsp_store
1746 .update_diagnostics(
1747 server_id,
1748 lsp::PublishDiagnosticsParams {
1749 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1750 version: None,
1751 diagnostics: vec![lsp::Diagnostic {
1752 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1753 severity: Some(lsp::DiagnosticSeverity::ERROR),
1754 message: "unused variable 'b'".to_string(),
1755 ..Default::default()
1756 }],
1757 },
1758 None,
1759 DiagnosticSourceKind::Pushed,
1760 &[],
1761 cx,
1762 )
1763 .unwrap();
1764 lsp_store
1765 .update_diagnostics(
1766 server_id,
1767 lsp::PublishDiagnosticsParams {
1768 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1769 version: None,
1770 diagnostics: vec![lsp::Diagnostic {
1771 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1772 severity: Some(lsp::DiagnosticSeverity::ERROR),
1773 message: "unknown variable 'c'".to_string(),
1774 ..Default::default()
1775 }],
1776 },
1777 None,
1778 DiagnosticSourceKind::Pushed,
1779 &[],
1780 cx,
1781 )
1782 .unwrap();
1783 });
1784
1785 let main_ignored_buffer = project
1786 .update(cx, |project, cx| {
1787 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1788 })
1789 .await
1790 .unwrap();
1791 main_ignored_buffer.update(cx, |buffer, _| {
1792 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1793 assert_eq!(
1794 chunks
1795 .iter()
1796 .map(|(s, d)| (s.as_str(), *d))
1797 .collect::<Vec<_>>(),
1798 &[
1799 ("let ", None),
1800 ("b", Some(DiagnosticSeverity::ERROR)),
1801 (" = 2;", None),
1802 ],
1803 "Gigitnored buffers should still get in-buffer diagnostics",
1804 );
1805 });
1806 let other_buffer = project
1807 .update(cx, |project, cx| {
1808 project.open_buffer((other_worktree_id, rel_path("")), cx)
1809 })
1810 .await
1811 .unwrap();
1812 other_buffer.update(cx, |buffer, _| {
1813 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1814 assert_eq!(
1815 chunks
1816 .iter()
1817 .map(|(s, d)| (s.as_str(), *d))
1818 .collect::<Vec<_>>(),
1819 &[
1820 ("let b = ", None),
1821 ("c", Some(DiagnosticSeverity::ERROR)),
1822 (";", None),
1823 ],
1824 "Buffers from hidden projects should still get in-buffer diagnostics"
1825 );
1826 });
1827
1828 project.update(cx, |project, cx| {
1829 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1830 assert_eq!(
1831 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1832 vec![(
1833 ProjectPath {
1834 worktree_id: main_worktree_id,
1835 path: rel_path("b.rs").into(),
1836 },
1837 server_id,
1838 DiagnosticSummary {
1839 error_count: 1,
1840 warning_count: 0,
1841 }
1842 )]
1843 );
1844 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1845 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1846 });
1847}
1848
// Verifies that a language server's disk-based-diagnostics progress token is
// translated into project events (DiskBasedDiagnosticsStarted / Finished),
// that published diagnostics produce DiagnosticsUpdated events, and that
// re-publishing identical empty diagnostics does not emit a redundant event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Progress notifications with this token prefix are interpreted as
            // disk-based diagnostics activity.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token surfaces a
    // DiskBasedDiagnosticsStarted event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for a.rs (which is not even open yet) surfaces a
    // DiagnosticsUpdated event for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards shows the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second, identical empty publish must produce no further event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1984
// Verifies that restarting a language server while its disk-based diagnostics
// progress is still open does not leave the project stuck in a "diagnostics
// running" state: the replacement server's progress lifecycle fully supersedes
// the old server's unfinished one.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed and the new one (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is considered to be running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2084
2085#[gpui::test]
2086async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2087 init_test(cx);
2088
2089 let fs = FakeFs::new(cx.executor());
2090 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2091
2092 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2093
2094 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2095 language_registry.add(rust_lang());
2096 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2097
2098 let (buffer, _) = project
2099 .update(cx, |project, cx| {
2100 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2101 })
2102 .await
2103 .unwrap();
2104
2105 // Publish diagnostics
2106 let fake_server = fake_servers.next().await.unwrap();
2107 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2108 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2109 version: None,
2110 diagnostics: vec![lsp::Diagnostic {
2111 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2112 severity: Some(lsp::DiagnosticSeverity::ERROR),
2113 message: "the message".to_string(),
2114 ..Default::default()
2115 }],
2116 });
2117
2118 cx.executor().run_until_parked();
2119 buffer.update(cx, |buffer, _| {
2120 assert_eq!(
2121 buffer
2122 .snapshot()
2123 .diagnostics_in_range::<_, usize>(0..1, false)
2124 .map(|entry| entry.diagnostic.message.clone())
2125 .collect::<Vec<_>>(),
2126 ["the message".to_string()]
2127 );
2128 });
2129 project.update(cx, |project, cx| {
2130 assert_eq!(
2131 project.diagnostic_summary(false, cx),
2132 DiagnosticSummary {
2133 error_count: 1,
2134 warning_count: 0,
2135 }
2136 );
2137 });
2138
2139 project.update(cx, |project, cx| {
2140 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2141 });
2142
2143 // The diagnostics are cleared.
2144 cx.executor().run_until_parked();
2145 buffer.update(cx, |buffer, _| {
2146 assert_eq!(
2147 buffer
2148 .snapshot()
2149 .diagnostics_in_range::<_, usize>(0..1, false)
2150 .map(|entry| entry.diagnostic.message.clone())
2151 .collect::<Vec<_>>(),
2152 Vec::<String>::new(),
2153 );
2154 });
2155 project.update(cx, |project, cx| {
2156 assert_eq!(
2157 project.diagnostic_summary(false, cx),
2158 DiagnosticSummary {
2159 error_count: 0,
2160 warning_count: 0,
2161 }
2162 );
2163 });
2164}
2165
2166#[gpui::test]
2167async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2168 init_test(cx);
2169
2170 let fs = FakeFs::new(cx.executor());
2171 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2172
2173 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2174 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2175
2176 language_registry.add(rust_lang());
2177 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2178
2179 let (buffer, _handle) = project
2180 .update(cx, |project, cx| {
2181 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2182 })
2183 .await
2184 .unwrap();
2185
2186 // Before restarting the server, report diagnostics with an unknown buffer version.
2187 let fake_server = fake_servers.next().await.unwrap();
2188 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2189 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2190 version: Some(10000),
2191 diagnostics: Vec::new(),
2192 });
2193 cx.executor().run_until_parked();
2194 project.update(cx, |project, cx| {
2195 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2196 });
2197
2198 let mut fake_server = fake_servers.next().await.unwrap();
2199 let notification = fake_server
2200 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2201 .await
2202 .text_document;
2203 assert_eq!(notification.version, 0);
2204}
2205
// Verifies that cancelling language-server work for a buffer sends a
// WorkDoneProgressCancel notification only for progress tokens that were
// started as cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Start two progress operations: one non-cancellable, one cancellable.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token ("the-progress-token") should be cancelled;
    // "another-token" was started with cancellable: false.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2270
// Toggling `enable_language_server` per language in the user settings must
// stop exactly the disabled server (observed via an `exit` notification) and
// start a fresh server (which re-opens the buffer) when re-enabled, without
// disturbing servers for other languages.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts one server per language.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is told to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new Rust server instance starts and re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // Meanwhile the JavaScript server is shut down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2388
// Diagnostics published for an older document version must be translated
// through the buffer edits made since that version: positions move with the
// text, overlapping diagnostics keep well-defined highlight precedence, and
// out-of-order/overlapping publishes resolve deterministically.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        // Anchored at the *open* version, i.e. before the "\n\n" insertion.
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows shifted down by 2 to account for the inserted "\n\n".
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // A warning whose range fully contains the error above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // The error (higher severity) wins where the two ranges overlap.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        // Note: the 'BB' entry (row 1) precedes the 'A' entry (row 0).
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by position, translated through the latest edits.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2680
// Empty (zero-width) diagnostic ranges must still be visible: mid-line they
// extend forward over the next character, and at end-of-line they extend
// backward over the preceding character.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        // Empty range mid-line (before the ';').
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        // Empty range at end-of-line.
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2756
2757#[gpui::test]
2758async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2759 init_test(cx);
2760
2761 let fs = FakeFs::new(cx.executor());
2762 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2763 .await;
2764
2765 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2766 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2767
2768 lsp_store.update(cx, |lsp_store, cx| {
2769 lsp_store
2770 .update_diagnostic_entries(
2771 LanguageServerId(0),
2772 Path::new("/dir/a.rs").to_owned(),
2773 None,
2774 None,
2775 vec![DiagnosticEntry {
2776 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2777 diagnostic: Diagnostic {
2778 severity: DiagnosticSeverity::ERROR,
2779 is_primary: true,
2780 message: "syntax error a1".to_string(),
2781 source_kind: DiagnosticSourceKind::Pushed,
2782 ..Diagnostic::default()
2783 },
2784 }],
2785 cx,
2786 )
2787 .unwrap();
2788 lsp_store
2789 .update_diagnostic_entries(
2790 LanguageServerId(1),
2791 Path::new("/dir/a.rs").to_owned(),
2792 None,
2793 None,
2794 vec![DiagnosticEntry {
2795 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2796 diagnostic: Diagnostic {
2797 severity: DiagnosticSeverity::ERROR,
2798 is_primary: true,
2799 message: "syntax error b1".to_string(),
2800 source_kind: DiagnosticSourceKind::Pushed,
2801 ..Diagnostic::default()
2802 },
2803 }],
2804 cx,
2805 )
2806 .unwrap();
2807
2808 assert_eq!(
2809 lsp_store.diagnostic_summary(false, cx),
2810 DiagnosticSummary {
2811 error_count: 2,
2812 warning_count: 0,
2813 }
2814 );
2815 });
2816}
2817
// LSP edits that carry an older document version must be interpreted against
// that version's text and then translated through the buffer edits made since,
// so they land in the right place in the current buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw when the document was opened; the
    // LSP edits below will be anchored to this (now-stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP ranges below are in coordinates of the *original* text.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits preserves both the server's changes and
    // the user's interleaved edits.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2972
// A large rewrite-style LSP diff (delete-everything / reinsert-everything, as
// rust-analyzer emits for merge-imports) must be minimized down to the actual
// textual differences before being applied.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The massive diff collapses into two small edits: the merged import
        // and the removal of the now-redundant second `use` line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
3083
// Servers sometimes violate the LSP spec by sending an insertion *after* a
// replacement at the same position; the edits must still be applied in a way
// that produces the intended text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion lands before the (no-op) replacement.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3139
// Malformed LSP edits — unordered, with inverted ranges and end positions past
// the end of the file — must be normalized (clipped and sorted) and still
// minimize to the correct small edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (99,0) is far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimized result as the well-formed variant of this diff.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
            use a::{b, c};

            fn f() {
                b();
                c();
            }
            "
            .unindent()
        );
    });
}
3246
3247fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3248 buffer: &Buffer,
3249 range: Range<T>,
3250) -> Vec<(String, Option<DiagnosticSeverity>)> {
3251 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3252 for chunk in buffer.snapshot().chunks(range, true) {
3253 if chunks
3254 .last()
3255 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3256 {
3257 chunks.last_mut().unwrap().0.push_str(chunk.text);
3258 } else {
3259 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3260 }
3261 }
3262 chunks
3263}
3264
// Go-to-definition into a file outside the project should open the target in
// an invisible worktree that is released (removed from the worktree list) once
// the definition handle is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at `A` inside the out-of-project file a.rs.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs shows up as an *invisible* worktree while the definition is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3363
// When a completion item provides a `text_edit`, that edit's text and range
// must be used verbatim — taking precedence over both `insert_text` and
// `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_language_server = fake_language_servers.next().await.unwrap();
    let fake_server = fake_language_server;

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server answers with an item whose text_edit replaces the last three
    // characters ("fqn") with "textEditText".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3446
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Exercises LSP 3.17 completion-list `itemDefaults.editRange`: when an
    // item has no `text_edit` of its own, the list's default edit range is
    // applied, and the replacement text falls back first to the item's
    // `text_edit_text` and, failing that, to its `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request before installing the handler; `.next().await`
        // below pumps until the handler has served it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // The default range replaces the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` wins over the label when present.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit range in play, the label is used — not
        // `insert_text`, which only applies when no edit range exists.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3583
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // When neither the completion item nor the list defaults supply an edit
    // range, the replacement text comes from `insert_text` (or the `label`
    // as a last resort) and the replaced range is computed locally from the
    // token around the completion position.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    // Label contains a '?' that must not end up in the text.
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The replaced range covers the "fqn" token preceding the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Complete just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no insert_text either, the label itself is the replacement.
    assert_eq!(completions[0].new_text, "component");
    // The replaced range covers "cmp", the token before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3689
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Completion text received from the server may contain "\r" or "\r\n"
    // line endings; they must be normalized to "\n" before insertion.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixed bare-CR and CRLF line endings in the server text.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" collapse to a single "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3757
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end flow for a code action that carries a command instead of
    // edits: resolving it yields a command, executing the command makes the
    // server push a `workspace/applyEdit`, and those edits end up in the
    // project transaction returned by `apply_code_action`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // The server supports lazily resolving actions.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // `data` is opaque server state used during resolve.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the client: prepend
                    // "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable like a normal edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3899
3900#[gpui::test]
3901async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
3902 init_test(cx);
3903 let fs = FakeFs::new(cx.background_executor.clone());
3904 let expected_contents = "content";
3905 fs.as_fake()
3906 .insert_tree(
3907 "/root",
3908 json!({
3909 "test.txt": expected_contents
3910 }),
3911 )
3912 .await;
3913
3914 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
3915
3916 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
3917 let worktree = project.worktrees(cx).next().unwrap();
3918 let entry_id = worktree
3919 .read(cx)
3920 .entry_for_path(rel_path("test.txt"))
3921 .unwrap()
3922 .id;
3923 (worktree, entry_id)
3924 });
3925 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
3926 let _result = project
3927 .update(cx, |project, cx| {
3928 project.rename_entry(
3929 entry_id,
3930 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
3931 cx,
3932 )
3933 })
3934 .await
3935 .unwrap();
3936 worktree.read_with(cx, |worktree, _| {
3937 assert!(
3938 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3939 "Old file should have been removed"
3940 );
3941 assert!(
3942 worktree
3943 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3944 .is_some(),
3945 "Whole directory hierarchy and the new file should have been created"
3946 );
3947 });
3948 assert_eq!(
3949 worktree
3950 .update(cx, |worktree, cx| {
3951 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
3952 })
3953 .await
3954 .unwrap()
3955 .text,
3956 expected_contents,
3957 "Moved file's contents should be preserved"
3958 );
3959
3960 let entry_id = worktree.read_with(cx, |worktree, _| {
3961 worktree
3962 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3963 .unwrap()
3964 .id
3965 });
3966
3967 let _result = project
3968 .update(cx, |project, cx| {
3969 project.rename_entry(
3970 entry_id,
3971 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
3972 cx,
3973 )
3974 })
3975 .await
3976 .unwrap();
3977 worktree.read_with(cx, |worktree, _| {
3978 assert!(
3979 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3980 "First file should not reappear"
3981 );
3982 assert!(
3983 worktree
3984 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3985 .is_none(),
3986 "Old file should have been removed"
3987 );
3988 assert!(
3989 worktree
3990 .entry_for_path(rel_path("dir1/dir2/test.txt"))
3991 .is_some(),
3992 "No error should have occurred after moving into existing directory"
3993 );
3994 });
3995 assert_eq!(
3996 worktree
3997 .update(cx, |worktree, cx| {
3998 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
3999 })
4000 .await
4001 .unwrap()
4002 .text,
4003 expected_contents,
4004 "Moved file's contents should be preserved"
4005 );
4006}
4007
4008#[gpui::test(iterations = 10)]
4009async fn test_save_file(cx: &mut gpui::TestAppContext) {
4010 init_test(cx);
4011
4012 let fs = FakeFs::new(cx.executor());
4013 fs.insert_tree(
4014 path!("/dir"),
4015 json!({
4016 "file1": "the old contents",
4017 }),
4018 )
4019 .await;
4020
4021 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4022 let buffer = project
4023 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4024 .await
4025 .unwrap();
4026 buffer.update(cx, |buffer, cx| {
4027 assert_eq!(buffer.text(), "the old contents");
4028 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4029 });
4030
4031 project
4032 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4033 .await
4034 .unwrap();
4035
4036 let new_text = fs
4037 .load(Path::new(path!("/dir/file1")))
4038 .await
4039 .unwrap()
4040 .replace("\r\n", "\n");
4041 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4042}
4043
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    // Regression test: saving an untitled buffer under a name with a known
    // extension must start the matching language server and register the
    // buffer with it.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; it has no language yet, so no server starts.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer with a ".rs" name, which assigns the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the freshly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4123
4124#[gpui::test(iterations = 30)]
4125async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4126 init_test(cx);
4127
4128 let fs = FakeFs::new(cx.executor());
4129 fs.insert_tree(
4130 path!("/dir"),
4131 json!({
4132 "file1": "the original contents",
4133 }),
4134 )
4135 .await;
4136
4137 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4138 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4139 let buffer = project
4140 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4141 .await
4142 .unwrap();
4143
4144 // Simulate buffer diffs being slow, so that they don't complete before
4145 // the next file change occurs.
4146 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4147
4148 // Change the buffer's file on disk, and then wait for the file change
4149 // to be detected by the worktree, so that the buffer starts reloading.
4150 fs.save(
4151 path!("/dir/file1").as_ref(),
4152 &"the first contents".into(),
4153 Default::default(),
4154 )
4155 .await
4156 .unwrap();
4157 worktree.next_event(cx).await;
4158
4159 // Change the buffer's file again. Depending on the random seed, the
4160 // previous file change may still be in progress.
4161 fs.save(
4162 path!("/dir/file1").as_ref(),
4163 &"the second contents".into(),
4164 Default::default(),
4165 )
4166 .await
4167 .unwrap();
4168 worktree.next_event(cx).await;
4169
4170 cx.executor().run_until_parked();
4171 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4172 buffer.read_with(cx, |buffer, _| {
4173 assert_eq!(buffer.text(), on_disk_text);
4174 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4175 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4176 });
4177}
4178
4179#[gpui::test(iterations = 30)]
4180async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4181 init_test(cx);
4182
4183 let fs = FakeFs::new(cx.executor());
4184 fs.insert_tree(
4185 path!("/dir"),
4186 json!({
4187 "file1": "the original contents",
4188 }),
4189 )
4190 .await;
4191
4192 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4193 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4194 let buffer = project
4195 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4196 .await
4197 .unwrap();
4198
4199 // Simulate buffer diffs being slow, so that they don't complete before
4200 // the next file change occurs.
4201 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4202
4203 // Change the buffer's file on disk, and then wait for the file change
4204 // to be detected by the worktree, so that the buffer starts reloading.
4205 fs.save(
4206 path!("/dir/file1").as_ref(),
4207 &"the first contents".into(),
4208 Default::default(),
4209 )
4210 .await
4211 .unwrap();
4212 worktree.next_event(cx).await;
4213
4214 cx.executor()
4215 .spawn(cx.executor().simulate_random_delay())
4216 .await;
4217
4218 // Perform a noop edit, causing the buffer's version to increase.
4219 buffer.update(cx, |buffer, cx| {
4220 buffer.edit([(0..0, " ")], None, cx);
4221 buffer.undo(cx);
4222 });
4223
4224 cx.executor().run_until_parked();
4225 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4226 buffer.read_with(cx, |buffer, _| {
4227 let buffer_text = buffer.text();
4228 if buffer_text == on_disk_text {
4229 assert!(
4230 !buffer.is_dirty() && !buffer.has_conflict(),
4231 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4232 );
4233 }
4234 // If the file change occurred while the buffer was processing the first
4235 // change, the buffer will be in a conflicting state.
4236 else {
4237 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4238 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4239 }
4240 });
4241}
4242
4243#[gpui::test]
4244async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4245 init_test(cx);
4246
4247 let fs = FakeFs::new(cx.executor());
4248 fs.insert_tree(
4249 path!("/dir"),
4250 json!({
4251 "file1": "the old contents",
4252 }),
4253 )
4254 .await;
4255
4256 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4257 let buffer = project
4258 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4259 .await
4260 .unwrap();
4261 buffer.update(cx, |buffer, cx| {
4262 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4263 });
4264
4265 project
4266 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4267 .await
4268 .unwrap();
4269
4270 let new_text = fs
4271 .load(Path::new(path!("/dir/file1")))
4272 .await
4273 .unwrap()
4274 .replace("\r\n", "\n");
4275 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4276}
4277
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    // `save_buffer_as` on an untitled buffer: the buffer gains a file, is no
    // longer dirty, has its language re-detected from the new extension, and
    // subsequently opening that path yields the very same buffer entity.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        // Untitled buffer: dirty, no conflict, and no language assigned yet.
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The ".rs" extension caused the Rust language to be assigned.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the existing buffer, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4331
4332#[gpui::test]
4333async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4334 init_test(cx);
4335
4336 let fs = FakeFs::new(cx.executor());
4337 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4338
4339 fs.insert_tree(
4340 path!("/dir"),
4341 json!({
4342 "data_a.txt": "data about a"
4343 }),
4344 )
4345 .await;
4346
4347 let buffer = project
4348 .update(cx, |project, cx| {
4349 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4350 })
4351 .await
4352 .unwrap();
4353
4354 buffer.update(cx, |buffer, cx| {
4355 buffer.edit([(11..12, "b")], None, cx);
4356 });
4357
4358 // Save buffer's contents as a new file and confirm that the buffer's now
4359 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4360 // file associated with the buffer has now been updated to `data_b.txt`
4361 project
4362 .update(cx, |project, cx| {
4363 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4364 let new_path = ProjectPath {
4365 worktree_id,
4366 path: rel_path("data_b.txt").into(),
4367 };
4368
4369 project.save_buffer_as(buffer.clone(), new_path, cx)
4370 })
4371 .await
4372 .unwrap();
4373
4374 buffer.update(cx, |buffer, cx| {
4375 assert_eq!(
4376 buffer.file().unwrap().full_path(cx),
4377 Path::new("dir/data_b.txt")
4378 )
4379 });
4380
4381 // Open the original `data_a.txt` file, confirming that its contents are
4382 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4383 let original_buffer = project
4384 .update(cx, |project, cx| {
4385 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4386 })
4387 .await
4388 .unwrap();
4389
4390 original_buffer.update(cx, |buffer, cx| {
4391 assert_eq!(buffer.text(), "data about a");
4392 assert_eq!(
4393 buffer.file().unwrap().full_path(cx),
4394 Path::new("dir/data_a.txt")
4395 )
4396 });
4397}
4398
4399#[gpui::test(retries = 5)]
4400async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4401 use worktree::WorktreeModelHandle as _;
4402
4403 init_test(cx);
4404 cx.executor().allow_parking();
4405
4406 let dir = TempTree::new(json!({
4407 "a": {
4408 "file1": "",
4409 "file2": "",
4410 "file3": "",
4411 },
4412 "b": {
4413 "c": {
4414 "file4": "",
4415 "file5": "",
4416 }
4417 }
4418 }));
4419
4420 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
4421
4422 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4423 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
4424 async move { buffer.await.unwrap() }
4425 };
4426 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4427 project.update(cx, |project, cx| {
4428 let tree = project.worktrees(cx).next().unwrap();
4429 tree.read(cx)
4430 .entry_for_path(rel_path(path))
4431 .unwrap_or_else(|| panic!("no entry for path {}", path))
4432 .id
4433 })
4434 };
4435
4436 let buffer2 = buffer_for_path("a/file2", cx).await;
4437 let buffer3 = buffer_for_path("a/file3", cx).await;
4438 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4439 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4440
4441 let file2_id = id_for_path("a/file2", cx);
4442 let file3_id = id_for_path("a/file3", cx);
4443 let file4_id = id_for_path("b/c/file4", cx);
4444
4445 // Create a remote copy of this worktree.
4446 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4447 let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
4448
4449 let updates = Arc::new(Mutex::new(Vec::new()));
4450 tree.update(cx, |tree, cx| {
4451 let updates = updates.clone();
4452 tree.observe_updates(0, cx, move |update| {
4453 updates.lock().push(update);
4454 async { true }
4455 });
4456 });
4457
4458 let remote = cx.update(|cx| {
4459 Worktree::remote(
4460 0,
4461 ReplicaId::REMOTE_SERVER,
4462 metadata,
4463 project.read(cx).client().into(),
4464 project.read(cx).path_style(cx),
4465 cx,
4466 )
4467 });
4468
4469 cx.executor().run_until_parked();
4470
4471 cx.update(|cx| {
4472 assert!(!buffer2.read(cx).is_dirty());
4473 assert!(!buffer3.read(cx).is_dirty());
4474 assert!(!buffer4.read(cx).is_dirty());
4475 assert!(!buffer5.read(cx).is_dirty());
4476 });
4477
4478 // Rename and delete files and directories.
4479 tree.flush_fs_events(cx).await;
4480 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4481 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4482 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4483 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4484 tree.flush_fs_events(cx).await;
4485
4486 cx.update(|app| {
4487 assert_eq!(
4488 tree.read(app).paths().collect::<Vec<_>>(),
4489 vec![
4490 rel_path("a"),
4491 rel_path("a/file1"),
4492 rel_path("a/file2.new"),
4493 rel_path("b"),
4494 rel_path("d"),
4495 rel_path("d/file3"),
4496 rel_path("d/file4"),
4497 ]
4498 );
4499 });
4500
4501 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
4502 assert_eq!(id_for_path("d/file3", cx), file3_id);
4503 assert_eq!(id_for_path("d/file4", cx), file4_id);
4504
4505 cx.update(|cx| {
4506 assert_eq!(
4507 buffer2.read(cx).file().unwrap().path().as_ref(),
4508 rel_path("a/file2.new")
4509 );
4510 assert_eq!(
4511 buffer3.read(cx).file().unwrap().path().as_ref(),
4512 rel_path("d/file3")
4513 );
4514 assert_eq!(
4515 buffer4.read(cx).file().unwrap().path().as_ref(),
4516 rel_path("d/file4")
4517 );
4518 assert_eq!(
4519 buffer5.read(cx).file().unwrap().path().as_ref(),
4520 rel_path("b/c/file5")
4521 );
4522
4523 assert_matches!(
4524 buffer2.read(cx).file().unwrap().disk_state(),
4525 DiskState::Present { .. }
4526 );
4527 assert_matches!(
4528 buffer3.read(cx).file().unwrap().disk_state(),
4529 DiskState::Present { .. }
4530 );
4531 assert_matches!(
4532 buffer4.read(cx).file().unwrap().disk_state(),
4533 DiskState::Present { .. }
4534 );
4535 assert_eq!(
4536 buffer5.read(cx).file().unwrap().disk_state(),
4537 DiskState::Deleted
4538 );
4539 });
4540
4541 // Update the remote worktree. Check that it becomes consistent with the
4542 // local worktree.
4543 cx.executor().run_until_parked();
4544
4545 remote.update(cx, |remote, _| {
4546 for update in updates.lock().drain(..) {
4547 remote.as_remote_mut().unwrap().update_from_remote(update);
4548 }
4549 });
4550 cx.executor().run_until_parked();
4551 remote.update(cx, |remote, _| {
4552 assert_eq!(
4553 remote.paths().collect::<Vec<_>>(),
4554 vec![
4555 rel_path("a"),
4556 rel_path("a/file1"),
4557 rel_path("a/file2.new"),
4558 rel_path("b"),
4559 rel_path("d"),
4560 rel_path("d/file3"),
4561 rel_path("d/file4"),
4562 ]
4563 );
4564 });
4565}
4566
4567#[gpui::test(iterations = 10)]
4568async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4569 init_test(cx);
4570
4571 let fs = FakeFs::new(cx.executor());
4572 fs.insert_tree(
4573 path!("/dir"),
4574 json!({
4575 "a": {
4576 "file1": "",
4577 }
4578 }),
4579 )
4580 .await;
4581
4582 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4583 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4584 let tree_id = tree.update(cx, |tree, _| tree.id());
4585
4586 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4587 project.update(cx, |project, cx| {
4588 let tree = project.worktrees(cx).next().unwrap();
4589 tree.read(cx)
4590 .entry_for_path(rel_path(path))
4591 .unwrap_or_else(|| panic!("no entry for path {}", path))
4592 .id
4593 })
4594 };
4595
4596 let dir_id = id_for_path("a", cx);
4597 let file_id = id_for_path("a/file1", cx);
4598 let buffer = project
4599 .update(cx, |p, cx| {
4600 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4601 })
4602 .await
4603 .unwrap();
4604 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4605
4606 project
4607 .update(cx, |project, cx| {
4608 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4609 })
4610 .unwrap()
4611 .await
4612 .into_included()
4613 .unwrap();
4614 cx.executor().run_until_parked();
4615
4616 assert_eq!(id_for_path("b", cx), dir_id);
4617 assert_eq!(id_for_path("b/file1", cx), file_id);
4618 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4619}
4620
4621#[gpui::test]
4622async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4623 init_test(cx);
4624
4625 let fs = FakeFs::new(cx.executor());
4626 fs.insert_tree(
4627 "/dir",
4628 json!({
4629 "a.txt": "a-contents",
4630 "b.txt": "b-contents",
4631 }),
4632 )
4633 .await;
4634
4635 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4636
4637 // Spawn multiple tasks to open paths, repeating some paths.
4638 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4639 (
4640 p.open_local_buffer("/dir/a.txt", cx),
4641 p.open_local_buffer("/dir/b.txt", cx),
4642 p.open_local_buffer("/dir/a.txt", cx),
4643 )
4644 });
4645
4646 let buffer_a_1 = buffer_a_1.await.unwrap();
4647 let buffer_a_2 = buffer_a_2.await.unwrap();
4648 let buffer_b = buffer_b.await.unwrap();
4649 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4650 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4651
4652 // There is only one buffer per path.
4653 let buffer_a_id = buffer_a_1.entity_id();
4654 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4655
4656 // Open the same path again while it is still open.
4657 drop(buffer_a_1);
4658 let buffer_a_3 = project
4659 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4660 .await
4661 .unwrap();
4662
4663 // There's still only one buffer per path.
4664 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4665}
4666
4667#[gpui::test]
4668async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4669 init_test(cx);
4670
4671 let fs = FakeFs::new(cx.executor());
4672 fs.insert_tree(
4673 path!("/dir"),
4674 json!({
4675 "file1": "abc",
4676 "file2": "def",
4677 "file3": "ghi",
4678 }),
4679 )
4680 .await;
4681
4682 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4683
4684 let buffer1 = project
4685 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4686 .await
4687 .unwrap();
4688 let events = Arc::new(Mutex::new(Vec::new()));
4689
4690 // initially, the buffer isn't dirty.
4691 buffer1.update(cx, |buffer, cx| {
4692 cx.subscribe(&buffer1, {
4693 let events = events.clone();
4694 move |_, _, event, _| match event {
4695 BufferEvent::Operation { .. } => {}
4696 _ => events.lock().push(event.clone()),
4697 }
4698 })
4699 .detach();
4700
4701 assert!(!buffer.is_dirty());
4702 assert!(events.lock().is_empty());
4703
4704 buffer.edit([(1..2, "")], None, cx);
4705 });
4706
4707 // after the first edit, the buffer is dirty, and emits a dirtied event.
4708 buffer1.update(cx, |buffer, cx| {
4709 assert!(buffer.text() == "ac");
4710 assert!(buffer.is_dirty());
4711 assert_eq!(
4712 *events.lock(),
4713 &[
4714 language::BufferEvent::Edited,
4715 language::BufferEvent::DirtyChanged
4716 ]
4717 );
4718 events.lock().clear();
4719 buffer.did_save(
4720 buffer.version(),
4721 buffer.file().unwrap().disk_state().mtime(),
4722 cx,
4723 );
4724 });
4725
4726 // after saving, the buffer is not dirty, and emits a saved event.
4727 buffer1.update(cx, |buffer, cx| {
4728 assert!(!buffer.is_dirty());
4729 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4730 events.lock().clear();
4731
4732 buffer.edit([(1..1, "B")], None, cx);
4733 buffer.edit([(2..2, "D")], None, cx);
4734 });
4735
4736 // after editing again, the buffer is dirty, and emits another dirty event.
4737 buffer1.update(cx, |buffer, cx| {
4738 assert!(buffer.text() == "aBDc");
4739 assert!(buffer.is_dirty());
4740 assert_eq!(
4741 *events.lock(),
4742 &[
4743 language::BufferEvent::Edited,
4744 language::BufferEvent::DirtyChanged,
4745 language::BufferEvent::Edited,
4746 ],
4747 );
4748 events.lock().clear();
4749
4750 // After restoring the buffer to its previously-saved state,
4751 // the buffer is not considered dirty anymore.
4752 buffer.edit([(1..3, "")], None, cx);
4753 assert!(buffer.text() == "ac");
4754 assert!(!buffer.is_dirty());
4755 });
4756
4757 assert_eq!(
4758 *events.lock(),
4759 &[
4760 language::BufferEvent::Edited,
4761 language::BufferEvent::DirtyChanged
4762 ]
4763 );
4764
4765 // When a file is deleted, it is not considered dirty.
4766 let events = Arc::new(Mutex::new(Vec::new()));
4767 let buffer2 = project
4768 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4769 .await
4770 .unwrap();
4771 buffer2.update(cx, |_, cx| {
4772 cx.subscribe(&buffer2, {
4773 let events = events.clone();
4774 move |_, _, event, _| match event {
4775 BufferEvent::Operation { .. } => {}
4776 _ => events.lock().push(event.clone()),
4777 }
4778 })
4779 .detach();
4780 });
4781
4782 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4783 .await
4784 .unwrap();
4785 cx.executor().run_until_parked();
4786 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4787 assert_eq!(
4788 mem::take(&mut *events.lock()),
4789 &[language::BufferEvent::FileHandleChanged]
4790 );
4791
4792 // Buffer becomes dirty when edited.
4793 buffer2.update(cx, |buffer, cx| {
4794 buffer.edit([(2..3, "")], None, cx);
4795 assert_eq!(buffer.is_dirty(), true);
4796 });
4797 assert_eq!(
4798 mem::take(&mut *events.lock()),
4799 &[
4800 language::BufferEvent::Edited,
4801 language::BufferEvent::DirtyChanged
4802 ]
4803 );
4804
4805 // Buffer becomes clean again when all of its content is removed, because
4806 // the file was deleted.
4807 buffer2.update(cx, |buffer, cx| {
4808 buffer.edit([(0..2, "")], None, cx);
4809 assert_eq!(buffer.is_empty(), true);
4810 assert_eq!(buffer.is_dirty(), false);
4811 });
4812 assert_eq!(
4813 *events.lock(),
4814 &[
4815 language::BufferEvent::Edited,
4816 language::BufferEvent::DirtyChanged
4817 ]
4818 );
4819
4820 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4821 let events = Arc::new(Mutex::new(Vec::new()));
4822 let buffer3 = project
4823 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4824 .await
4825 .unwrap();
4826 buffer3.update(cx, |_, cx| {
4827 cx.subscribe(&buffer3, {
4828 let events = events.clone();
4829 move |_, _, event, _| match event {
4830 BufferEvent::Operation { .. } => {}
4831 _ => events.lock().push(event.clone()),
4832 }
4833 })
4834 .detach();
4835 });
4836
4837 buffer3.update(cx, |buffer, cx| {
4838 buffer.edit([(0..0, "x")], None, cx);
4839 });
4840 events.lock().clear();
4841 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4842 .await
4843 .unwrap();
4844 cx.executor().run_until_parked();
4845 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4846 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4847}
4848
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts when its file changes on disk:
    // a clean buffer reloads (and anchors survive, being remapped through a
    // diff of old vs. new contents), while a dirty buffer keeps its edits and
    // is flagged as having a conflict instead.
    init_test(cx);

    // The "ˇ" markers record byte offsets; anchors created at these offsets
    // should track the corresponding text across the on-disk change below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor should now resolve to the marker position in the new
        // text, proving positions were remapped rather than reset.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4931
4932#[gpui::test]
4933async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4934 init_test(cx);
4935
4936 let fs = FakeFs::new(cx.executor());
4937 fs.insert_tree(
4938 path!("/dir"),
4939 json!({
4940 "file1": "a\nb\nc\n",
4941 "file2": "one\r\ntwo\r\nthree\r\n",
4942 }),
4943 )
4944 .await;
4945
4946 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4947 let buffer1 = project
4948 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4949 .await
4950 .unwrap();
4951 let buffer2 = project
4952 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4953 .await
4954 .unwrap();
4955
4956 buffer1.update(cx, |buffer, _| {
4957 assert_eq!(buffer.text(), "a\nb\nc\n");
4958 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4959 });
4960 buffer2.update(cx, |buffer, _| {
4961 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4962 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4963 });
4964
4965 // Change a file's line endings on disk from unix to windows. The buffer's
4966 // state updates correctly.
4967 fs.save(
4968 path!("/dir/file1").as_ref(),
4969 &"aaa\nb\nc\n".into(),
4970 LineEnding::Windows,
4971 )
4972 .await
4973 .unwrap();
4974 cx.executor().run_until_parked();
4975 buffer1.update(cx, |buffer, _| {
4976 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4977 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4978 });
4979
4980 // Save a file with windows line endings. The file is written correctly.
4981 buffer2.update(cx, |buffer, cx| {
4982 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4983 });
4984 project
4985 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4986 .await
4987 .unwrap();
4988 assert_eq!(
4989 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4990 "one\r\ntwo\r\nthree\r\nfour\r\n",
4991 );
4992}
4993
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Pushes one LSP `publishDiagnostics` message containing two primary
    // diagnostics plus hint-severity entries derived from their
    // `related_information`, then asserts that the store groups them:
    // each group gets one primary entry plus its hints, all sharing a
    // `group_id`, and entries are returned sorted by buffer position.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // One warning ("error 1") with one related hint, and one error
    // ("error 2") with two related hints; the hint diagnostics reference
    // their primaries (and vice versa) via `related_information`.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the message into the LSP store as pushed (server-initiated)
    // diagnostics for server id 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics over the whole buffer, ordered by position. "error 1"
    // and its hint share group 1; "error 2" and its two hints share group 0.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus its two hints, in position order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5253
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Renaming a worktree entry must drive the LSP file-operation protocol:
    // send `workspace/willRenameFiles` to a server that registered matching
    // file-operation filters, apply the workspace edit it returns, then send
    // `workspace/didRenameFiles` after the rename completes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the fake server registers for: `*.rs` files and any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of one.rs -> three.rs; the task can't finish until
    // the server answers `willRenameFiles` below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // Edit the server will return from willRenameFiles: a text edit in the
    // other file (two/two.rs), to verify the project applies it.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler actually returned, so we can assert the
    // request round-trip happened exactly once.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe exactly the one pending rename.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5389
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // End-to-end symbol rename through LSP: `prepare_rename` resolves the
    // symbol range under the cursor, then `perform_rename` applies the
    // multi-file WorkspaceEdit the server returns and reports the edited
    // buffers in the resulting transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    // Advertise prepare support so prepare_rename is routed
                    // to the server rather than handled locally.
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server answers with
    // the symbol's range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    // The LSP range maps back to buffer offsets 6..9, i.e. "ONE".
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server returns edits spanning both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction covers both edited buffers: the one that was already
    // open (one.rs) and the one opened to apply edits (two.rs).
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5529
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search must find matches both in files on disk and
    // in unsaved edits held by open buffers.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // On-disk search: "TWO" occurs in two.rs (the declaration) and three.rs
    // (the reference); offsets are byte ranges of each match.
    // NOTE(review): the boolean/Default arguments mirror SearchQuery::text's
    // positional parameters — their meanings aren't visible from this file.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now contains two
    // occurrences of "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query now also reports the unsaved in-buffer matches in
    // four.rs, alongside the unchanged on-disk results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5606
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Inclusion path matchers should restrict search results to matching
    // files; non-matching inclusion globs are harmless when combined with
    // matching ones.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob matching nothing -> empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Inclusion glob "*.rs" -> only the two Rust files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching glob plus a dead one behaves like the matching glob alone.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs union their result sets.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5730
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Exclusion path matchers should filter matching files out of search
    // results; non-matching exclusion globs have no effect.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion glob matching nothing -> all four files returned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding "*.rs" leaves only the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A matching exclusion plus a dead one behaves like the matching one alone.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension present -> empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5854
5855#[gpui::test]
5856async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5857 init_test(cx);
5858
5859 let search_query = "file";
5860
5861 let fs = FakeFs::new(cx.executor());
5862 fs.insert_tree(
5863 path!("/dir"),
5864 json!({
5865 "one.rs": r#"// Rust file one"#,
5866 "one.ts": r#"// TypeScript file one"#,
5867 "two.rs": r#"// Rust file two"#,
5868 "two.ts": r#"// TypeScript file two"#,
5869 }),
5870 )
5871 .await;
5872
5873 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5874 let path_style = PathStyle::local();
5875 let _buffer = project.update(cx, |project, cx| {
5876 project.create_local_buffer("file", None, false, cx)
5877 });
5878
5879 assert_eq!(
5880 search(
5881 &project,
5882 SearchQuery::text(
5883 search_query,
5884 false,
5885 true,
5886 false,
5887 Default::default(),
5888 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5889 false,
5890 None,
5891 )
5892 .unwrap(),
5893 cx
5894 )
5895 .await
5896 .unwrap(),
5897 HashMap::from_iter([
5898 (path!("dir/one.rs").to_string(), vec![8..12]),
5899 (path!("dir/one.ts").to_string(), vec![14..18]),
5900 (path!("dir/two.rs").to_string(), vec![8..12]),
5901 (path!("dir/two.ts").to_string(), vec![14..18]),
5902 ]),
5903 "If no exclusions match, all files should be returned"
5904 );
5905
5906 assert_eq!(
5907 search(
5908 &project,
5909 SearchQuery::text(
5910 search_query,
5911 false,
5912 true,
5913 false,
5914 Default::default(),
5915 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5916 false,
5917 None,
5918 )
5919 .unwrap(),
5920 cx
5921 )
5922 .await
5923 .unwrap(),
5924 HashMap::from_iter([
5925 (path!("dir/one.ts").to_string(), vec![14..18]),
5926 (path!("dir/two.ts").to_string(), vec![14..18]),
5927 ]),
5928 "Rust exclusion search should give only TypeScript files"
5929 );
5930
5931 assert_eq!(
5932 search(
5933 &project,
5934 SearchQuery::text(
5935 search_query,
5936 false,
5937 true,
5938 false,
5939 Default::default(),
5940 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5941 false,
5942 None,
5943 )
5944 .unwrap(),
5945 cx
5946 )
5947 .await
5948 .unwrap(),
5949 HashMap::from_iter([
5950 (path!("dir/one.rs").to_string(), vec![8..12]),
5951 (path!("dir/two.rs").to_string(), vec![8..12]),
5952 ]),
5953 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5954 );
5955
5956 assert!(
5957 search(
5958 &project,
5959 SearchQuery::text(
5960 search_query,
5961 false,
5962 true,
5963 false,
5964 Default::default(),
5965 PathMatcher::new(
5966 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5967 PathStyle::local(),
5968 )
5969 .unwrap(),
5970 false,
5971 None,
5972 )
5973 .unwrap(),
5974 cx
5975 )
5976 .await
5977 .unwrap()
5978 .is_empty(),
5979 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5980 );
5981}
5982
5983#[gpui::test]
5984async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5985 init_test(cx);
5986
5987 let search_query = "file";
5988
5989 let fs = FakeFs::new(cx.executor());
5990 fs.insert_tree(
5991 path!("/dir"),
5992 json!({
5993 "one.rs": r#"// Rust file one"#,
5994 "one.ts": r#"// TypeScript file one"#,
5995 "two.rs": r#"// Rust file two"#,
5996 "two.ts": r#"// TypeScript file two"#,
5997 }),
5998 )
5999 .await;
6000 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6001 assert!(
6002 search(
6003 &project,
6004 SearchQuery::text(
6005 search_query,
6006 false,
6007 true,
6008 false,
6009 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6010 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
6011 false,
6012 None,
6013 )
6014 .unwrap(),
6015 cx
6016 )
6017 .await
6018 .unwrap()
6019 .is_empty(),
6020 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6021 );
6022
6023 assert!(
6024 search(
6025 &project,
6026 SearchQuery::text(
6027 search_query,
6028 false,
6029 true,
6030 false,
6031 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6032 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6033 false,
6034 None,
6035 )
6036 .unwrap(),
6037 cx
6038 )
6039 .await
6040 .unwrap()
6041 .is_empty(),
6042 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6043 );
6044
6045 assert!(
6046 search(
6047 &project,
6048 SearchQuery::text(
6049 search_query,
6050 false,
6051 true,
6052 false,
6053 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6054 .unwrap(),
6055 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6056 .unwrap(),
6057 false,
6058 None,
6059 )
6060 .unwrap(),
6061 cx
6062 )
6063 .await
6064 .unwrap()
6065 .is_empty(),
6066 "Non-matching inclusions and exclusions should not change that."
6067 );
6068
6069 assert_eq!(
6070 search(
6071 &project,
6072 SearchQuery::text(
6073 search_query,
6074 false,
6075 true,
6076 false,
6077 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6078 .unwrap(),
6079 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6080 .unwrap(),
6081 false,
6082 None,
6083 )
6084 .unwrap(),
6085 cx
6086 )
6087 .await
6088 .unwrap(),
6089 HashMap::from_iter([
6090 (path!("dir/one.ts").to_string(), vec![14..18]),
6091 (path!("dir/two.ts").to_string(), vec![14..18]),
6092 ]),
6093 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6094 );
6095}
6096
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // With two worktrees, an inclusion glob prefixed by a worktree name should
    // scope results to that worktree; an extension-only glob should match
    // across all worktrees.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): the 7th bool is `true` only for the worktree-prefixed
    // globs below — presumably it makes the matcher run against full
    // worktree-relative paths; confirm against SearchQuery::text.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Extension-only glob, no worktree prefix -> matches in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6195
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Gitignored directories (target/, node_modules/) are skipped by default,
    // searched when `include_ignored` is set, and can be further narrowed by
    // inclusion/exclusion globs.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default search (include_ignored = false): only the root package.json.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project so earlier scans don't affect this case.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // include_ignored = true: every file containing "key" is found.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // include_ignored + globs: include only the ignored prettier dir, then
    // exclude TypeScript — leaving a single match.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6320
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Searching Cyrillic text: case-sensitive queries stay plain-text
    // queries, but case-insensitive non-ASCII queries get promoted to regex
    // (asserted via the SearchQuery variant). Offsets below are byte offsets,
    // so each Cyrillic char counts as 2 bytes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-sensitive Unicode query remains a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive Unicode query is internally converted to a regex.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Despite the regex promotion, a literal '.' in the query must not act as
    // a regex wildcard: only the file with an actual trailing dot matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6403
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    // Creating a file entry through the project should write it to the fake
    // filesystem, including an unusual name with trailing dots ("b..").
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree root is /one/two/three, so "b.." lands inside it.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, rel_path("b..")), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // Full filesystem listing: the new file exists alongside everything else.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );
}
6446
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // With four language servers attached to one buffer, a hover request
    // should fan out only to servers advertising hover capability, and the
    // result should merge the non-empty responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // First three servers advertise hover capability; the fourth does not and
    // must never receive a hover request.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install per-server hover handlers:
    // - Tailwind/TypeScript answer with "<name> hover"
    // - ESLint answers None (has the capability, but nothing to say)
    // - NoHoverCapabilitiesServer panics if queried at all
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Block until every capability-bearing server has actually been asked.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // ESLint's `None` is dropped; only the two non-empty hovers remain
    // (sorted for a deterministic comparison).
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6601
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A hover response consisting only of empty/whitespace marked strings
    // should be filtered down to no hover blocks at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three hover parts that are all effectively empty:
    // "", whitespace-only, and newlines-only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("  ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Make sure the request actually reached the fake server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6675
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Requesting code actions with a `kinds` filter should drop actions of
    // other kinds even when the server returns them.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Server offers two actions of different kinds; only one matches the
    // filter requested below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the server actually received the request before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6754
6755#[gpui::test]
6756async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6757 init_test(cx);
6758
6759 let fs = FakeFs::new(cx.executor());
6760 fs.insert_tree(
6761 path!("/dir"),
6762 json!({
6763 "a.tsx": "a",
6764 }),
6765 )
6766 .await;
6767
6768 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6769
6770 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6771 language_registry.add(tsx_lang());
6772 let language_server_names = [
6773 "TypeScriptServer",
6774 "TailwindServer",
6775 "ESLintServer",
6776 "NoActionsCapabilitiesServer",
6777 ];
6778
6779 let mut language_server_rxs = [
6780 language_registry.register_fake_lsp(
6781 "tsx",
6782 FakeLspAdapter {
6783 name: language_server_names[0],
6784 capabilities: lsp::ServerCapabilities {
6785 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6786 ..lsp::ServerCapabilities::default()
6787 },
6788 ..FakeLspAdapter::default()
6789 },
6790 ),
6791 language_registry.register_fake_lsp(
6792 "tsx",
6793 FakeLspAdapter {
6794 name: language_server_names[1],
6795 capabilities: lsp::ServerCapabilities {
6796 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6797 ..lsp::ServerCapabilities::default()
6798 },
6799 ..FakeLspAdapter::default()
6800 },
6801 ),
6802 language_registry.register_fake_lsp(
6803 "tsx",
6804 FakeLspAdapter {
6805 name: language_server_names[2],
6806 capabilities: lsp::ServerCapabilities {
6807 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6808 ..lsp::ServerCapabilities::default()
6809 },
6810 ..FakeLspAdapter::default()
6811 },
6812 ),
6813 language_registry.register_fake_lsp(
6814 "tsx",
6815 FakeLspAdapter {
6816 name: language_server_names[3],
6817 capabilities: lsp::ServerCapabilities {
6818 code_action_provider: None,
6819 ..lsp::ServerCapabilities::default()
6820 },
6821 ..FakeLspAdapter::default()
6822 },
6823 ),
6824 ];
6825
6826 let (buffer, _handle) = project
6827 .update(cx, |p, cx| {
6828 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6829 })
6830 .await
6831 .unwrap();
6832 cx.executor().run_until_parked();
6833
6834 let mut servers_with_actions_requests = HashMap::default();
6835 for i in 0..language_server_names.len() {
6836 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6837 panic!(
6838 "Failed to get language server #{i} with name {}",
6839 &language_server_names[i]
6840 )
6841 });
6842 let new_server_name = new_server.server.name();
6843
6844 assert!(
6845 !servers_with_actions_requests.contains_key(&new_server_name),
6846 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6847 );
6848 match new_server_name.0.as_ref() {
6849 "TailwindServer" | "TypeScriptServer" => {
6850 servers_with_actions_requests.insert(
6851 new_server_name.clone(),
6852 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6853 move |_, _| {
6854 let name = new_server_name.clone();
6855 async move {
6856 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6857 lsp::CodeAction {
6858 title: format!("{name} code action"),
6859 ..lsp::CodeAction::default()
6860 },
6861 )]))
6862 }
6863 },
6864 ),
6865 );
6866 }
6867 "ESLintServer" => {
6868 servers_with_actions_requests.insert(
6869 new_server_name,
6870 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6871 |_, _| async move { Ok(None) },
6872 ),
6873 );
6874 }
6875 "NoActionsCapabilitiesServer" => {
6876 let _never_handled = new_server
6877 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6878 panic!(
6879 "Should not call for code actions server with no corresponding capabilities"
6880 )
6881 });
6882 }
6883 unexpected => panic!("Unexpected server name: {unexpected}"),
6884 }
6885 }
6886
6887 let code_actions_task = project.update(cx, |project, cx| {
6888 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6889 });
6890
6891 // cx.run_until_parked();
6892 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6893 |mut code_actions_request| async move {
6894 code_actions_request
6895 .next()
6896 .await
6897 .expect("All code actions requests should have been triggered")
6898 },
6899 ))
6900 .await;
6901 assert_eq!(
6902 vec!["TailwindServer code action", "TypeScriptServer code action"],
6903 code_actions_task
6904 .await
6905 .unwrap()
6906 .unwrap()
6907 .into_iter()
6908 .map(|code_action| code_action.lsp_action.title().to_owned())
6909 .sorted()
6910 .collect::<Vec<_>>(),
6911 "Should receive code actions responses from all related servers with hover capabilities"
6912 );
6913}
6914
6915#[gpui::test]
6916async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6917 init_test(cx);
6918
6919 let fs = FakeFs::new(cx.executor());
6920 fs.insert_tree(
6921 "/dir",
6922 json!({
6923 "a.rs": "let a = 1;",
6924 "b.rs": "let b = 2;",
6925 "c.rs": "let c = 2;",
6926 }),
6927 )
6928 .await;
6929
6930 let project = Project::test(
6931 fs,
6932 [
6933 "/dir/a.rs".as_ref(),
6934 "/dir/b.rs".as_ref(),
6935 "/dir/c.rs".as_ref(),
6936 ],
6937 cx,
6938 )
6939 .await;
6940
6941 // check the initial state and get the worktrees
6942 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6943 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6944 assert_eq!(worktrees.len(), 3);
6945
6946 let worktree_a = worktrees[0].read(cx);
6947 let worktree_b = worktrees[1].read(cx);
6948 let worktree_c = worktrees[2].read(cx);
6949
6950 // check they start in the right order
6951 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6952 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6953 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6954
6955 (
6956 worktrees[0].clone(),
6957 worktrees[1].clone(),
6958 worktrees[2].clone(),
6959 )
6960 });
6961
6962 // move first worktree to after the second
6963 // [a, b, c] -> [b, a, c]
6964 project
6965 .update(cx, |project, cx| {
6966 let first = worktree_a.read(cx);
6967 let second = worktree_b.read(cx);
6968 project.move_worktree(first.id(), second.id(), cx)
6969 })
6970 .expect("moving first after second");
6971
6972 // check the state after moving
6973 project.update(cx, |project, cx| {
6974 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6975 assert_eq!(worktrees.len(), 3);
6976
6977 let first = worktrees[0].read(cx);
6978 let second = worktrees[1].read(cx);
6979 let third = worktrees[2].read(cx);
6980
6981 // check they are now in the right order
6982 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6983 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6984 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6985 });
6986
6987 // move the second worktree to before the first
6988 // [b, a, c] -> [a, b, c]
6989 project
6990 .update(cx, |project, cx| {
6991 let second = worktree_a.read(cx);
6992 let first = worktree_b.read(cx);
6993 project.move_worktree(first.id(), second.id(), cx)
6994 })
6995 .expect("moving second before first");
6996
6997 // check the state after moving
6998 project.update(cx, |project, cx| {
6999 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7000 assert_eq!(worktrees.len(), 3);
7001
7002 let first = worktrees[0].read(cx);
7003 let second = worktrees[1].read(cx);
7004 let third = worktrees[2].read(cx);
7005
7006 // check they are now in the right order
7007 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7008 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7009 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7010 });
7011
7012 // move the second worktree to after the third
7013 // [a, b, c] -> [a, c, b]
7014 project
7015 .update(cx, |project, cx| {
7016 let second = worktree_b.read(cx);
7017 let third = worktree_c.read(cx);
7018 project.move_worktree(second.id(), third.id(), cx)
7019 })
7020 .expect("moving second after third");
7021
7022 // check the state after moving
7023 project.update(cx, |project, cx| {
7024 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7025 assert_eq!(worktrees.len(), 3);
7026
7027 let first = worktrees[0].read(cx);
7028 let second = worktrees[1].read(cx);
7029 let third = worktrees[2].read(cx);
7030
7031 // check they are now in the right order
7032 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7033 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7034 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7035 });
7036
7037 // move the third worktree to before the second
7038 // [a, c, b] -> [a, b, c]
7039 project
7040 .update(cx, |project, cx| {
7041 let third = worktree_c.read(cx);
7042 let second = worktree_b.read(cx);
7043 project.move_worktree(third.id(), second.id(), cx)
7044 })
7045 .expect("moving third before second");
7046
7047 // check the state after moving
7048 project.update(cx, |project, cx| {
7049 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7050 assert_eq!(worktrees.len(), 3);
7051
7052 let first = worktrees[0].read(cx);
7053 let second = worktrees[1].read(cx);
7054 let third = worktrees[2].read(cx);
7055
7056 // check they are now in the right order
7057 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7058 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7059 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7060 });
7061
7062 // move the first worktree to after the third
7063 // [a, b, c] -> [b, c, a]
7064 project
7065 .update(cx, |project, cx| {
7066 let first = worktree_a.read(cx);
7067 let third = worktree_c.read(cx);
7068 project.move_worktree(first.id(), third.id(), cx)
7069 })
7070 .expect("moving first after third");
7071
7072 // check the state after moving
7073 project.update(cx, |project, cx| {
7074 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7075 assert_eq!(worktrees.len(), 3);
7076
7077 let first = worktrees[0].read(cx);
7078 let second = worktrees[1].read(cx);
7079 let third = worktrees[2].read(cx);
7080
7081 // check they are now in the right order
7082 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7083 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7084 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7085 });
7086
7087 // move the third worktree to before the first
7088 // [b, c, a] -> [a, b, c]
7089 project
7090 .update(cx, |project, cx| {
7091 let third = worktree_a.read(cx);
7092 let first = worktree_b.read(cx);
7093 project.move_worktree(third.id(), first.id(), cx)
7094 })
7095 .expect("moving third before first");
7096
7097 // check the state after moving
7098 project.update(cx, |project, cx| {
7099 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7100 assert_eq!(worktrees.len(), 3);
7101
7102 let first = worktrees[0].read(cx);
7103 let second = worktrees[1].read(cx);
7104 let third = worktrees[2].read(cx);
7105
7106 // check they are now in the right order
7107 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7108 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7109 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7110 });
7111}
7112
7113#[gpui::test]
7114async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7115 init_test(cx);
7116
7117 let staged_contents = r#"
7118 fn main() {
7119 println!("hello world");
7120 }
7121 "#
7122 .unindent();
7123 let file_contents = r#"
7124 // print goodbye
7125 fn main() {
7126 println!("goodbye world");
7127 }
7128 "#
7129 .unindent();
7130
7131 let fs = FakeFs::new(cx.background_executor.clone());
7132 fs.insert_tree(
7133 "/dir",
7134 json!({
7135 ".git": {},
7136 "src": {
7137 "main.rs": file_contents,
7138 }
7139 }),
7140 )
7141 .await;
7142
7143 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7144
7145 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7146
7147 let buffer = project
7148 .update(cx, |project, cx| {
7149 project.open_local_buffer("/dir/src/main.rs", cx)
7150 })
7151 .await
7152 .unwrap();
7153 let unstaged_diff = project
7154 .update(cx, |project, cx| {
7155 project.open_unstaged_diff(buffer.clone(), cx)
7156 })
7157 .await
7158 .unwrap();
7159
7160 cx.run_until_parked();
7161 unstaged_diff.update(cx, |unstaged_diff, cx| {
7162 let snapshot = buffer.read(cx).snapshot();
7163 assert_hunks(
7164 unstaged_diff.hunks(&snapshot, cx),
7165 &snapshot,
7166 &unstaged_diff.base_text_string().unwrap(),
7167 &[
7168 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7169 (
7170 2..3,
7171 " println!(\"hello world\");\n",
7172 " println!(\"goodbye world\");\n",
7173 DiffHunkStatus::modified_none(),
7174 ),
7175 ],
7176 );
7177 });
7178
7179 let staged_contents = r#"
7180 // print goodbye
7181 fn main() {
7182 }
7183 "#
7184 .unindent();
7185
7186 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7187
7188 cx.run_until_parked();
7189 unstaged_diff.update(cx, |unstaged_diff, cx| {
7190 let snapshot = buffer.read(cx).snapshot();
7191 assert_hunks(
7192 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7193 &snapshot,
7194 &unstaged_diff.base_text().text(),
7195 &[(
7196 2..3,
7197 "",
7198 " println!(\"goodbye world\");\n",
7199 DiffHunkStatus::added_none(),
7200 )],
7201 );
7202 });
7203}
7204
// Covers the uncommitted diff (working copy vs. HEAD):
// - per-hunk secondary (staged/unstaged) statuses when index and HEAD differ,
// - recomputation when HEAD is reset underneath an open buffer,
// - diffs for a file deleted from the working copy, and staging that deletion.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index each contain `modification.rs` plus a second file
    // (`deletion.rs`) that doesn't exist in the working copy.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should be assigned the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line has a secondary hunk (it isn't in the index),
    // while the println modification is already staged.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as a deletion hunk; it still has a secondary hunk
    // because the deletion isn't staged yet (the index still has the file).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by writing an index that omits it.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but its secondary hunk is gone now that the
    // deletion is staged.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7384
// End-to-end test of staging individual hunks through the uncommitted diff:
// optimistic "pending" secondary statuses, the event sequence emitted while an
// index write is in flight, rollback when the index write fails, and two
// staging operations issued back-to-back.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so all differences between the
    // working copy and HEAD are unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so their order can be asserted below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // While the index write is in flight, the staged hunk reports
        // `SecondaryHunkRemovalPending` rather than flipping immediately.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The optimistic pending state is shown even though the write will fail.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7724
// Staging hunks while file-system events are paused and then flushed out of
// step with the index writes, to exercise races between staging and event
// delivery. Specific seeds are pinned in the attribute — presumably to
// reproduce a previously-observed failure (TODO: confirm the history).
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so every hunk is initially unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // The staged hunk is optimistically pending until the FS event lands.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are now pending; neither FS event has arrived.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7918
// Randomized staging/unstaging of hunks with random yields in between,
// verifying that every hunk's secondary status eventually converges to the
// last state requested for it. The OPERATIONS env var controls how many
// stage/unstage operations are performed (default 20).
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines committed; the working copy modifies every 5th line, which
    // yields the 6 separate hunks asserted below.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Randomly toggle hunks between staged and unstaged, recording the
    // expected pending status on our local copy of each hunk.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to interleave with background work.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status should have resolved to
    // its corresponding final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8041
/// Verifies that opening a single file (rather than a directory) as a worktree
/// still produces a working uncommitted diff, even though the `.git` directory
/// lies outside the worktree root.
#[gpui::test]
async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Contents of src/main.rs as committed to HEAD (and staged in the index).
    let committed_contents = r#"
        fn main() {
            println!("hello from HEAD");
        }
    "#
    .unindent();
    // Contents of src/main.rs in the working copy — one line differs from HEAD.
    let file_contents = r#"
        fn main() {
            println!("hello from the working copy");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index agree, so the only diff is working copy vs. HEAD.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/main.rs", committed_contents.clone())],
    );

    // Open only the file itself — the repo root is an ancestor of the worktree.
    let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();
    // Expect a single modified hunk on row 1, with a secondary (unstaged) hunk
    // since the index still matches HEAD.
    uncommitted_diff.update(cx, |uncommitted_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            uncommitted_diff.hunks(&snapshot, cx),
            &snapshot,
            &uncommitted_diff.base_text_string().unwrap(),
            &[(
                1..2,
                "    println!(\"hello from HEAD\");\n",
                "    println!(\"hello from the working copy\");\n",
                DiffHunkStatus {
                    kind: DiffHunkStatusKind::Modified,
                    secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
                },
            )],
        );
    });
}
8115
/// Verifies that `GitStore::repository_and_path_for_project_path` maps a
/// project path to the innermost containing repository (including nested
/// dependency repos), and that the mapping is dropped when a `.git` directory
/// is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (project-relative path, expected (repo work dir, repo-relative path)).
        // `c.txt` lies outside any repository; `a.txt` belongs to the nested dep repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Deleting the outer repo's `.git` directory should drop the repository,
    // leaving paths under `dir1` unmapped.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8205
/// Verifies handling of a git repository rooted at the user's home directory:
/// it is deliberately ignored when the worktree is a subfolder of home, but is
/// picked up when home itself is opened as the worktree.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Open only `~/project`: the repo at `~` should not be detected.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Open `~` itself as the worktree: now the home-dir repo is recognized.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        // The repo's work directory should be the home directory itself.
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8263
/// End-to-end check (against a real git repository on disk) that cached file
/// statuses track modifications, additions, deletions, commits, and removals
/// of both tracked and untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-fs test: file watching needs to park the executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file and verify it now shows as
    // modified in the worktree.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the outstanding changes (and confirm d.txt's deletion in the
    // index), then delete one tracked and one untracked file.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8393
/// Checks post-processing of raw git statuses: nested repositories are
/// excluded from the outer repo's status list, and a file deleted in the index
/// but present in HEAD and the working copy is reported with a combined
/// `DA` (index-deleted, worktree-added) status.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-fs test: file watching needs to park the executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Two repos exist (outer project and nested `sub`); pick the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8456
8457#[track_caller]
8458/// We merge lhs into rhs.
8459fn merge_pending_ops_snapshots(
8460 source: Vec<pending_op::PendingOps>,
8461 mut target: Vec<pending_op::PendingOps>,
8462) -> Vec<pending_op::PendingOps> {
8463 for s_ops in source {
8464 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8465 if ops.repo_path == s_ops.repo_path {
8466 Some(idx)
8467 } else {
8468 None
8469 }
8470 }) {
8471 let t_ops = &mut target[idx];
8472 for s_op in s_ops.ops {
8473 if let Some(op_idx) = t_ops
8474 .ops
8475 .iter()
8476 .zip(0..)
8477 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8478 {
8479 let t_op = &mut t_ops.ops[op_idx];
8480 match (s_op.job_status, t_op.job_status) {
8481 (pending_op::JobStatus::Running, _) => {}
8482 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8483 (s_st, t_st) if s_st == t_st => {}
8484 _ => unreachable!(),
8485 }
8486 } else {
8487 t_ops.ops.push(s_op);
8488 }
8489 }
8490 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8491 } else {
8492 target.push(s_ops);
8493 }
8494 }
8495 target
8496}
8497
/// Exercises per-path pending-op tracking while repeatedly staging and
/// unstaging a single untracked file: each request first appears as `Running`,
/// settles to `Finished` once awaited, and op ids increase monotonically.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into a single merged snapshot,
    // so we can assert on the full op history at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops_by_path.is_empty());
    });

    // Expected id of the next op; ids appear to be assigned sequentially per path.
    let mut id = 1u16;

    // Issues a stage or unstage request for `path`, asserting the op is
    // `Running` while in flight and `Finished` after the task completes.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging/unstaging, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The accumulated event history should show all five ops, all finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Final cached status: the file ended up staged (added to the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8658
/// Verifies that when two stage requests for the same path are issued
/// back-to-back, the first (detached) op is marked `Skipped` — superseded by
/// the second, which runs to completion.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into a single merged snapshot.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detach without awaiting, leaving it in flight.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path; await it (with a timeout so the
    // test fails fast if it never completes).
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was superseded (Skipped); op 2 actually performed the staging.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // Final cached status: the file ended up staged (added to the index).
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8764
/// Exercises pending-op tracking for `stage_all`/`unstage_all` across multiple
/// untracked files: per-path op histories are recorded independently, and
/// unstage-all returns both files to `Untracked`.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Accumulate every PendingOpsChanged event into a single merged snapshot.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage a.txt individually, then stage everything, then unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: op 1 is the individual stage (stage_all adds no new op for an
    // already-staged path — TODO confirm), op 2 is the unstage-all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: op 1 comes from stage_all, op 2 from unstage_all.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // After unstage_all, both files are back to untracked.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
8893
/// Verifies that when the worktree root is a subfolder of a repository, the
/// repository above the worktree is still discovered and its statuses (keyed
/// by repo-relative paths) are observable and refresh correctly.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths relative to the repository root, not the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the deep subfolder; the repo root is two levels above it.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear the fake repo's status and check the change is picked up.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8973
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Compiled out entirely via `#[cfg(any())]` (an always-false cfg) until the
// flakiness is resolved.
/// Simulates a conflicted `git cherry-pick` on a real repository and verifies
/// that the repository's `merge_conflicts` set tracks the conflicted path, and
/// empties again once the conflict is resolved and committed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-fs test: file watching needs to park the executor.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a commit on another branch that conflicts with main, then
    // cherry-pick it onto main to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git itself sees the in-progress cherry-pick conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9056
/// Verifies that rewriting `.gitignore` updates per-entry ignored state, and
/// that a newly un-ignored file can then be staged and reported as added.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both contain the gitignore and the xml file; b.txt is ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored states flip: a.xml is now ignored, b.txt is tracked and added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9124
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
/// Verifies that renaming a repository's work directory on disk is tracked:
/// the repository's `work_directory_abs_path` follows the rename and all
/// cached statuses are preserved.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real-fs test: file watching needs to park the executor.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work dir is project1, `a` modified, `b` untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository should now point at project2, with statuses intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9206
9207// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
9208// you can't rename a directory which some program has already open. This is a
9209// limitation of the Windows. See:
9210// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
9211// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check that repository status tracking (via `RealFs` on a real
    // temp directory) follows working-copy edits, commits, resets, stashes,
    // ignore-rule changes, and directory renames.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    // Repo-relative paths used throughout the assertions below.
    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    // a.txt, e.txt, and .gitignore are tracked; b.txt and f.txt stay untracked.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    // Committed files (a.txt, b.txt) become clean and report no status.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    // Reset undoes the last commit (b.txt becomes untracked again), and the
    // write under `target/` must stay invisible due to the ignore rule.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files/directories and extend the ignore rules; the updated
    // .gitignore is committed so f.txt becomes ignored from here on.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new nested directory with an untracked file in it.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Rename the outer directory; the status should follow the file to its
    // new path. (This is the step that cannot work on Windows — see NOTE.)
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
9431
9432#[gpui::test]
9433#[ignore]
9434async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
9435 init_test(cx);
9436 cx.executor().allow_parking();
9437
9438 const IGNORE_RULE: &str = "**/target";
9439
9440 let root = TempTree::new(json!({
9441 "project": {
9442 "src": {
9443 "main.rs": "fn main() {}"
9444 },
9445 "target": {
9446 "debug": {
9447 "important_text.txt": "important text",
9448 },
9449 },
9450 ".gitignore": IGNORE_RULE
9451 },
9452
9453 }));
9454 let root_path = root.path();
9455
9456 // Set up git repository before creating the worktree.
9457 let work_dir = root.path().join("project");
9458 let repo = git_init(work_dir.as_path());
9459 repo.add_ignore_rule(IGNORE_RULE).unwrap();
9460 git_add("src/main.rs", &repo);
9461 git_add(".gitignore", &repo);
9462 git_commit("Initial commit", &repo);
9463
9464 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
9465 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9466 let project_events = Arc::new(Mutex::new(Vec::new()));
9467 project.update(cx, |project, cx| {
9468 let repo_events = repository_updates.clone();
9469 cx.subscribe(project.git_store(), move |_, _, e, _| {
9470 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9471 repo_events.lock().push(e.clone());
9472 }
9473 })
9474 .detach();
9475 let project_events = project_events.clone();
9476 cx.subscribe_self(move |_, e, _| {
9477 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9478 project_events.lock().extend(
9479 updates
9480 .iter()
9481 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9482 .filter(|(path, _)| path != "fs-event-sentinel"),
9483 );
9484 }
9485 })
9486 .detach();
9487 });
9488
9489 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9490 tree.flush_fs_events(cx).await;
9491 tree.update(cx, |tree, cx| {
9492 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
9493 })
9494 .await
9495 .unwrap();
9496 tree.update(cx, |tree, _| {
9497 assert_eq!(
9498 tree.entries(true, 0)
9499 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9500 .collect::<Vec<_>>(),
9501 vec![
9502 (rel_path(""), false),
9503 (rel_path("project/"), false),
9504 (rel_path("project/.gitignore"), false),
9505 (rel_path("project/src"), false),
9506 (rel_path("project/src/main.rs"), false),
9507 (rel_path("project/target"), true),
9508 (rel_path("project/target/debug"), true),
9509 (rel_path("project/target/debug/important_text.txt"), true),
9510 ]
9511 );
9512 });
9513
9514 assert_eq!(
9515 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9516 vec![
9517 RepositoryEvent::StatusesChanged { full_scan: true },
9518 RepositoryEvent::MergeHeadsChanged,
9519 ],
9520 "Initial worktree scan should produce a repo update event"
9521 );
9522 assert_eq!(
9523 project_events.lock().drain(..).collect::<Vec<_>>(),
9524 vec![
9525 ("project/target".to_string(), PathChange::Loaded),
9526 ("project/target/debug".to_string(), PathChange::Loaded),
9527 (
9528 "project/target/debug/important_text.txt".to_string(),
9529 PathChange::Loaded
9530 ),
9531 ],
9532 "Initial project changes should show that all not-ignored and all opened files are loaded"
9533 );
9534
9535 let deps_dir = work_dir.join("target").join("debug").join("deps");
9536 std::fs::create_dir_all(&deps_dir).unwrap();
9537 tree.flush_fs_events(cx).await;
9538 project
9539 .update(cx, |project, cx| project.git_scans_complete(cx))
9540 .await;
9541 cx.executor().run_until_parked();
9542 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9543 tree.flush_fs_events(cx).await;
9544 project
9545 .update(cx, |project, cx| project.git_scans_complete(cx))
9546 .await;
9547 cx.executor().run_until_parked();
9548 std::fs::remove_dir_all(&deps_dir).unwrap();
9549 tree.flush_fs_events(cx).await;
9550 project
9551 .update(cx, |project, cx| project.git_scans_complete(cx))
9552 .await;
9553 cx.executor().run_until_parked();
9554
9555 tree.update(cx, |tree, _| {
9556 assert_eq!(
9557 tree.entries(true, 0)
9558 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9559 .collect::<Vec<_>>(),
9560 vec![
9561 (rel_path(""), false),
9562 (rel_path("project/"), false),
9563 (rel_path("project/.gitignore"), false),
9564 (rel_path("project/src"), false),
9565 (rel_path("project/src/main.rs"), false),
9566 (rel_path("project/target"), true),
9567 (rel_path("project/target/debug"), true),
9568 (rel_path("project/target/debug/important_text.txt"), true),
9569 ],
9570 "No stray temp files should be left after the flycheck changes"
9571 );
9572 });
9573
9574 assert_eq!(
9575 repository_updates
9576 .lock()
9577 .iter()
9578 .cloned()
9579 .collect::<Vec<_>>(),
9580 Vec::new(),
9581 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9582 );
9583 assert_eq!(
9584 project_events.lock().as_slice(),
9585 vec![
9586 ("project/target/debug/deps".to_string(), PathChange::Added),
9587 ("project/target/debug/deps".to_string(), PathChange::Removed),
9588 ],
9589 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9590 No updates for more nested directories should happen as those are ignored",
9591 );
9592}
9593
9594#[gpui::test]
9595async fn test_odd_events_for_ignored_dirs(
9596 executor: BackgroundExecutor,
9597 cx: &mut gpui::TestAppContext,
9598) {
9599 init_test(cx);
9600 let fs = FakeFs::new(executor);
9601 fs.insert_tree(
9602 path!("/root"),
9603 json!({
9604 ".git": {},
9605 ".gitignore": "**/target/",
9606 "src": {
9607 "main.rs": "fn main() {}",
9608 },
9609 "target": {
9610 "debug": {
9611 "foo.txt": "foo",
9612 "deps": {}
9613 }
9614 }
9615 }),
9616 )
9617 .await;
9618 fs.set_head_and_index_for_repo(
9619 path!("/root/.git").as_ref(),
9620 &[
9621 (".gitignore", "**/target/".into()),
9622 ("src/main.rs", "fn main() {}".into()),
9623 ],
9624 );
9625
9626 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9627 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9628 let project_events = Arc::new(Mutex::new(Vec::new()));
9629 project.update(cx, |project, cx| {
9630 let repository_updates = repository_updates.clone();
9631 cx.subscribe(project.git_store(), move |_, _, e, _| {
9632 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9633 repository_updates.lock().push(e.clone());
9634 }
9635 })
9636 .detach();
9637 let project_events = project_events.clone();
9638 cx.subscribe_self(move |_, e, _| {
9639 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9640 project_events.lock().extend(
9641 updates
9642 .iter()
9643 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9644 .filter(|(path, _)| path != "fs-event-sentinel"),
9645 );
9646 }
9647 })
9648 .detach();
9649 });
9650
9651 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9652 tree.update(cx, |tree, cx| {
9653 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9654 })
9655 .await
9656 .unwrap();
9657 tree.flush_fs_events(cx).await;
9658 project
9659 .update(cx, |project, cx| project.git_scans_complete(cx))
9660 .await;
9661 cx.run_until_parked();
9662 tree.update(cx, |tree, _| {
9663 assert_eq!(
9664 tree.entries(true, 0)
9665 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9666 .collect::<Vec<_>>(),
9667 vec![
9668 (rel_path(""), false),
9669 (rel_path(".gitignore"), false),
9670 (rel_path("src"), false),
9671 (rel_path("src/main.rs"), false),
9672 (rel_path("target"), true),
9673 (rel_path("target/debug"), true),
9674 (rel_path("target/debug/deps"), true),
9675 (rel_path("target/debug/foo.txt"), true),
9676 ]
9677 );
9678 });
9679
9680 assert_eq!(
9681 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9682 vec![
9683 RepositoryEvent::MergeHeadsChanged,
9684 RepositoryEvent::BranchChanged,
9685 RepositoryEvent::StatusesChanged { full_scan: false },
9686 RepositoryEvent::StatusesChanged { full_scan: false },
9687 ],
9688 "Initial worktree scan should produce a repo update event"
9689 );
9690 assert_eq!(
9691 project_events.lock().drain(..).collect::<Vec<_>>(),
9692 vec![
9693 ("target".to_string(), PathChange::Loaded),
9694 ("target/debug".to_string(), PathChange::Loaded),
9695 ("target/debug/deps".to_string(), PathChange::Loaded),
9696 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9697 ],
9698 "All non-ignored entries and all opened firs should be getting a project event",
9699 );
9700
9701 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9702 // This may happen multiple times during a single flycheck, but once is enough for testing.
9703 fs.emit_fs_event("/root/target/debug/deps", None);
9704 tree.flush_fs_events(cx).await;
9705 project
9706 .update(cx, |project, cx| project.git_scans_complete(cx))
9707 .await;
9708 cx.executor().run_until_parked();
9709
9710 assert_eq!(
9711 repository_updates
9712 .lock()
9713 .iter()
9714 .cloned()
9715 .collect::<Vec<_>>(),
9716 Vec::new(),
9717 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9718 );
9719 assert_eq!(
9720 project_events.lock().as_slice(),
9721 Vec::new(),
9722 "No further project events should happen, as only ignored dirs received FS events",
9723 );
9724}
9725
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that adding an invisible (non-user-visible) worktree — here a
    // single file inside an enclosing repository — does not cause that outer
    // repository to be picked up by the project's repository list.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Open only the inner repo (`dep1`) as the visible worktree.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only dep1's repository should be known at this point.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible worktree for a file living in the outer repo (dir1).
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list must be unchanged: the outer dir1 repo stays hidden.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9787
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that after files are created in tracked, ancestor-ignored, and
    // repo-ignored locations, rescans report the correct git status and
    // ignored flag for each entry.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                // Disable default exclusions so ignored dirs are still scanned.
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the contents of the ignored dir to be loaded so its entries exist.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file clean, the other two ignored (by the
    // ancestor .gitignore and the repo .gitignore respectively).
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a second file of each kind; tracked-file2 is also added to the
    // index so it should show up as Added.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git dir itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9928
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (`.git` file pointing at
    // `.git/worktrees/...`) and submodules (`.git` file pointing at
    // `.git/modules/...`) are each discovered as distinct repositories, and
    // that git state changes in them are picked up.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three work directories should be discovered: the main repo, the
    // linked worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    // HEAD/index contain "b" while the file on disk contains "B", so the file
    // should read as modified in the working tree.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer `/project` repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // Barrier ensures the repository has processed pending updates
        // before we inspect its statuses.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10084
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two worktrees rooted in sibling subdirectories of the same git
    // repository must resolve to a single shared repository entry, not two.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Exactly one repository, rooted at the common parent.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
10131
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    // Verifies that when a buffer is saved under a different path (save-as),
    // its unstaged and uncommitted diffs switch their base texts to the new
    // path's staged/committed contents via the `BufferChangedFilePath` event.
    init_test(cx);

    // Distinct contents for each (file, git layer) pair so assertions can
    // tell exactly which base text a diff is using.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    // Dirty the buffer so the save below actually writes content.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, _cx| {
        let base_text = unstaged_diff.base_text_string().unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string().unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        // Buffer content ("buffer") differs from file_2's staged text, so
        // there must be at least one hunk.
        let hunks: Vec<_> = unstaged_diff.hunks(&snapshot, cx).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    // An uncommitted diff opened after the rename should already compare
    // against file_2's committed contents.
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, _cx| {
        let base_text = uncommitted_diff.base_text_string().unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10245
10246async fn search(
10247 project: &Entity<Project>,
10248 query: SearchQuery,
10249 cx: &mut gpui::TestAppContext,
10250) -> Result<HashMap<String, Vec<Range<usize>>>> {
10251 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10252 let mut results = HashMap::default();
10253 while let Ok(search_result) = search_rx.recv().await {
10254 match search_result {
10255 SearchResult::Buffer { buffer, ranges } => {
10256 results.entry(buffer).or_insert(ranges);
10257 }
10258 SearchResult::LimitReached => {}
10259 }
10260 }
10261 Ok(results
10262 .into_iter()
10263 .map(|(buffer, ranges)| {
10264 buffer.update(cx, |buffer, cx| {
10265 let path = buffer
10266 .file()
10267 .unwrap()
10268 .full_path(cx)
10269 .to_string_lossy()
10270 .to_string();
10271 let ranges = ranges
10272 .into_iter()
10273 .map(|range| range.to_offset(buffer))
10274 .collect::<Vec<_>>();
10275 (path, ranges)
10276 })
10277 })
10278 .collect())
10279}
10280
10281pub fn init_test(cx: &mut gpui::TestAppContext) {
10282 zlog::init_test();
10283
10284 cx.update(|cx| {
10285 let settings_store = SettingsStore::test(cx);
10286 cx.set_global(settings_store);
10287 release_channel::init(SemanticVersion::default(), cx);
10288 });
10289}
10290
10291fn json_lang() -> Arc<Language> {
10292 Arc::new(Language::new(
10293 LanguageConfig {
10294 name: "JSON".into(),
10295 matcher: LanguageMatcher {
10296 path_suffixes: vec!["json".to_string()],
10297 ..Default::default()
10298 },
10299 ..Default::default()
10300 },
10301 None,
10302 ))
10303}
10304
10305fn js_lang() -> Arc<Language> {
10306 Arc::new(Language::new(
10307 LanguageConfig {
10308 name: "JavaScript".into(),
10309 matcher: LanguageMatcher {
10310 path_suffixes: vec!["js".to_string()],
10311 ..Default::default()
10312 },
10313 ..Default::default()
10314 },
10315 None,
10316 ))
10317}
10318
10319fn rust_lang() -> Arc<Language> {
10320 Arc::new(Language::new(
10321 LanguageConfig {
10322 name: "Rust".into(),
10323 matcher: LanguageMatcher {
10324 path_suffixes: vec!["rs".to_string()],
10325 ..Default::default()
10326 },
10327 ..Default::default()
10328 },
10329 Some(tree_sitter_rust::LANGUAGE.into()),
10330 ))
10331}
10332
// Python language for tests, with a stub toolchain lister that reports a
// "Python Venv" toolchain for every `.venv` directory found in the ancestors
// of the queried subroot (checked against the provided `FakeFs`).
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister; `.0` is the fake filesystem used for `.venv` lookups.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unsupported in tests.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for these tests.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
10402
10403fn typescript_lang() -> Arc<Language> {
10404 Arc::new(Language::new(
10405 LanguageConfig {
10406 name: "TypeScript".into(),
10407 matcher: LanguageMatcher {
10408 path_suffixes: vec!["ts".to_string()],
10409 ..Default::default()
10410 },
10411 ..Default::default()
10412 },
10413 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10414 ))
10415}
10416
10417fn tsx_lang() -> Arc<Language> {
10418 Arc::new(Language::new(
10419 LanguageConfig {
10420 name: "tsx".into(),
10421 matcher: LanguageMatcher {
10422 path_suffixes: vec!["tsx".to_string()],
10423 ..Default::default()
10424 },
10425 ..Default::default()
10426 },
10427 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10428 ))
10429}
10430
10431fn get_all_tasks(
10432 project: &Entity<Project>,
10433 task_contexts: Arc<TaskContexts>,
10434 cx: &mut App,
10435) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10436 let new_tasks = project.update(cx, |project, cx| {
10437 project.task_store.update(cx, |task_store, cx| {
10438 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10439 this.used_and_current_resolved_tasks(task_contexts, cx)
10440 })
10441 })
10442 });
10443
10444 cx.background_spawn(async move {
10445 let (mut old, new) = new_tasks.await;
10446 old.extend(new);
10447 old
10448 })
10449}
10450
10451#[track_caller]
10452fn assert_entry_git_state(
10453 tree: &Worktree,
10454 repository: &Repository,
10455 path: &str,
10456 index_status: Option<StatusCode>,
10457 is_ignored: bool,
10458) {
10459 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10460 let entry = tree
10461 .entry_for_path(&rel_path(path))
10462 .unwrap_or_else(|| panic!("entry {path} not found"));
10463 let status = repository
10464 .status_for_path(&repo_path(path))
10465 .map(|entry| entry.status);
10466 let expected = index_status.map(|index_status| {
10467 TrackedStatus {
10468 index_status,
10469 worktree_status: StatusCode::Unmodified,
10470 }
10471 .into()
10472 });
10473 assert_eq!(
10474 status, expected,
10475 "expected {path} to have git status: {expected:?}"
10476 );
10477 assert_eq!(
10478 entry.is_ignored, is_ignored,
10479 "expected {path} to have is_ignored: {is_ignored}"
10480 );
10481}
10482
10483#[track_caller]
10484fn git_init(path: &Path) -> git2::Repository {
10485 let mut init_opts = RepositoryInitOptions::new();
10486 init_opts.initial_head("main");
10487 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10488}
10489
10490#[track_caller]
10491fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10492 let path = path.as_ref();
10493 let mut index = repo.index().expect("Failed to get index");
10494 index.add_path(path).expect("Failed to add file");
10495 index.write().expect("Failed to write index");
10496}
10497
10498#[track_caller]
10499fn git_remove_index(path: &Path, repo: &git2::Repository) {
10500 let mut index = repo.index().expect("Failed to get index");
10501 index.remove_path(path).expect("Failed to add file");
10502 index.write().expect("Failed to write index");
10503}
10504
10505#[track_caller]
10506fn git_commit(msg: &'static str, repo: &git2::Repository) {
10507 use git2::Signature;
10508
10509 let signature = Signature::now("test", "test@zed.dev").unwrap();
10510 let oid = repo.index().unwrap().write_tree().unwrap();
10511 let tree = repo.find_tree(oid).unwrap();
10512 if let Ok(head) = repo.head() {
10513 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10514
10515 let parent_commit = parent_obj.as_commit().unwrap();
10516
10517 repo.commit(
10518 Some("HEAD"),
10519 &signature,
10520 &signature,
10521 msg,
10522 &tree,
10523 &[parent_commit],
10524 )
10525 .expect("Failed to commit with parent");
10526 } else {
10527 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10528 .expect("Failed to commit");
10529 }
10530}
10531
// Cherry-picks `commit` onto the current HEAD. Compiled out via
// `#[cfg(any())]` (always false); kept around for future git tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
10537
10538#[track_caller]
10539fn git_stash(repo: &mut git2::Repository) {
10540 use git2::Signature;
10541
10542 let signature = Signature::now("test", "test@zed.dev").unwrap();
10543 repo.stash_save(&signature, "N/A", None)
10544 .expect("Failed to stash");
10545}
10546
10547#[track_caller]
10548fn git_reset(offset: usize, repo: &git2::Repository) {
10549 let head = repo.head().expect("Couldn't get repo head");
10550 let object = head.peel(git2::ObjectType::Commit).unwrap();
10551 let commit = object.as_commit().unwrap();
10552 let new_head = commit
10553 .parents()
10554 .inspect(|parnet| {
10555 parnet.message();
10556 })
10557 .nth(offset)
10558 .expect("Not enough history");
10559 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10560 .expect("Could not reset");
10561}
10562
// Creates branch `name` at the current HEAD commit. Compiled out via
// `#[cfg(any())]` (always false); kept around for future git tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed panic message: it previously read "Failed to commit", a
    // copy-paste from `git_commit`; this call creates a branch.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10573
// Points HEAD at ref `name` and checks it out into the working tree.
// Compiled out via `#[cfg(any())]` (always false); kept for future git tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10580
// Snapshots `repo`'s status as a path → git2 status-flags map. Compiled out
// via `#[cfg(any())]` (always false); kept around for future git tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
10590
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling directory trees under /root, opened as separate worktrees.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id so we can assert
    // which worktree a resolved ProjectPath belongs to.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Absolute path of a file at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested files resolve with their worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Paths in the second worktree resolve against that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A not-yet-existing file under a worktree root still resolves.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
10674
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two git repositories: /root/a and /root/b. The /root/b/script worktree
    // is nested inside repo b but contains no repository of its own.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index worktree ids by absolute path so we can remove them by path below.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories (a and b).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested `script` worktree must not drop repo b, since the
    // /root/b worktree still covers it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree a should shift the active repository to b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // With the last worktree gone, no active repository should remain.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}