1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry, pending_op},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, FutureExt, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use sum_tree::SumTree;
54use task::{ResolvedTask, ShellKind, TaskContext};
55use unindent::Unindent as _;
56use util::{
57 TryFutureExt as _, assert_set_eq, maybe, path,
58 paths::PathMatcher,
59 rel_path::rel_path,
60 test::{TempTree, marked_text_offsets},
61 uri,
62};
63use worktree::WorktreeModelHandle as _;
64
65#[gpui::test]
66async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
67 cx.executor().allow_parking();
68
69 let (tx, mut rx) = futures::channel::mpsc::unbounded();
70 let _thread = std::thread::spawn(move || {
71 #[cfg(not(target_os = "windows"))]
72 std::fs::metadata("/tmp").unwrap();
73 #[cfg(target_os = "windows")]
74 std::fs::metadata("C:/Windows").unwrap();
75 std::thread::sleep(Duration::from_millis(1000));
76 tx.unbounded_send(1).unwrap();
77 });
78 rx.next().await.unwrap();
79}
80
81#[gpui::test]
82async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
83 cx.executor().allow_parking();
84
85 let io_task = smol::unblock(move || {
86 println!("sleeping on thread {:?}", std::thread::current().id());
87 std::thread::sleep(Duration::from_millis(10));
88 1
89 });
90
91 let task = cx.foreground_executor().spawn(async move {
92 io_task.await;
93 });
94
95 task.await;
96}
97
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    // Real on-disk tree (not FakeFs): symlink resolution requires an actual
    // filesystem, hence RealFs below.
    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // One symlink pointing at the root directory itself, and one inside the
    // tree ("root/finnochio") pointing at a sibling directory ("root/fennel").
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root path.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // apple, banana/carrot/date, banana/carrot/endive, fennel/grape,
        // and finnochio/grape (seen through the symlink).
        assert_eq!(tree.file_count(), 5);
        // The file reached via the symlinked directory resolves to the same
        // underlying inode as the original.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
148
149#[gpui::test]
150async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
151 init_test(cx);
152
153 let dir = TempTree::new(json!({
154 ".editorconfig": r#"
155 root = true
156 [*.rs]
157 indent_style = tab
158 indent_size = 3
159 end_of_line = lf
160 insert_final_newline = true
161 trim_trailing_whitespace = true
162 max_line_length = 120
163 [*.js]
164 tab_width = 10
165 max_line_length = off
166 "#,
167 ".zed": {
168 "settings.json": r#"{
169 "tab_size": 8,
170 "hard_tabs": false,
171 "ensure_final_newline_on_save": false,
172 "remove_trailing_whitespace_on_save": false,
173 "preferred_line_length": 64,
174 "soft_wrap": "editor_width",
175 }"#,
176 },
177 "a.rs": "fn a() {\n A\n}",
178 "b": {
179 ".editorconfig": r#"
180 [*.rs]
181 indent_size = 2
182 max_line_length = off,
183 "#,
184 "b.rs": "fn b() {\n B\n}",
185 },
186 "c.js": "def c\n C\nend",
187 "README.json": "tabs are better\n",
188 }));
189
190 let path = dir.path();
191 let fs = FakeFs::new(cx.executor());
192 fs.insert_tree_from_real_fs(path, path).await;
193 let project = Project::test(fs, [path], cx).await;
194
195 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
196 language_registry.add(js_lang());
197 language_registry.add(json_lang());
198 language_registry.add(rust_lang());
199
200 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
201
202 cx.executor().run_until_parked();
203
204 cx.update(|cx| {
205 let tree = worktree.read(cx);
206 let settings_for = |path: &str| {
207 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
208 let file = File::for_entry(file_entry, worktree.clone());
209 let file_language = project
210 .read(cx)
211 .languages()
212 .load_language_for_file_path(file.path.as_std_path());
213 let file_language = cx
214 .background_executor()
215 .block(file_language)
216 .expect("Failed to get file language");
217 let file = file as _;
218 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
219 };
220
221 let settings_a = settings_for("a.rs");
222 let settings_b = settings_for("b/b.rs");
223 let settings_c = settings_for("c.js");
224 let settings_readme = settings_for("README.json");
225
226 // .editorconfig overrides .zed/settings
227 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
228 assert_eq!(settings_a.hard_tabs, true);
229 assert_eq!(settings_a.ensure_final_newline_on_save, true);
230 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
231 assert_eq!(settings_a.preferred_line_length, 120);
232
233 // .editorconfig in b/ overrides .editorconfig in root
234 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
235
236 // "indent_size" is not set, so "tab_width" is used
237 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
238
239 // When max_line_length is "off", default to .zed/settings.json
240 assert_eq!(settings_b.preferred_line_length, 64);
241 assert_eq!(settings_c.preferred_line_length, 64);
242
243 // README.md should not be affected by .editorconfig's globe "*.rs"
244 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
245 });
246}
247
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        // Register the global git-hosting-provider registry before project
        // settings are loaded, so the settings observer can populate it.
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom gitlab-flavored provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // After settings load, the provider from settings.json is registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Overwrite the settings file with an empty object; the custom provider
    // should be removed from the registry once the change is observed.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
312
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two .zed directories: one at the worktree root and one nested under b/.
    // Each contributes its own settings.json (tab_size) and tasks.json.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Task resolution below runs against the active worktree's context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for the tasks.json in the root-level .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Sanity-check the settings side first: the nested .zed overrides
            // the root one for files under b/.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks.json files are picked up.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as most-recently-scheduled and register a global
    // (file-based) tasks.json with one extra task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // After scheduling, the recently-used root task sorts first; the newly
    // added global task sorts last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
513
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Single worktree with one task whose command references the
    // $ZED_WORKTREE_ROOT variable.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With no active worktree context, $ZED_WORKTREE_ROOT cannot be
    // substituted, so the task must not resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Providing a worktree context that defines WorktreeRoot makes the task
    // resolvable, with the variable expanded in the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
605
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a project "root" is the nearest ancestor
    // directory containing a pyproject.toml file.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path`, returning the first
        // ancestor that contains a pyproject.toml.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // One worktree containing two independent Python subprojects, each with
    // its own pyproject.toml and virtual environment directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a should start a single "ty" server rooted
    // at project-a.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance while no
    // per-project toolchain distinguishes the two subprojects.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // The manifest provider roots project-b at its pyproject.toml directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b only.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
807
808#[gpui::test]
809async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
810 init_test(cx);
811
812 let fs = FakeFs::new(cx.executor());
813 fs.insert_tree(
814 path!("/dir"),
815 json!({
816 "test.rs": "const A: i32 = 1;",
817 "test2.rs": "",
818 "Cargo.toml": "a = 1",
819 "package.json": "{\"a\": 1}",
820 }),
821 )
822 .await;
823
824 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
825 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
826
827 let mut fake_rust_servers = language_registry.register_fake_lsp(
828 "Rust",
829 FakeLspAdapter {
830 name: "the-rust-language-server",
831 capabilities: lsp::ServerCapabilities {
832 completion_provider: Some(lsp::CompletionOptions {
833 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
834 ..Default::default()
835 }),
836 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
837 lsp::TextDocumentSyncOptions {
838 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
839 ..Default::default()
840 },
841 )),
842 ..Default::default()
843 },
844 ..Default::default()
845 },
846 );
847 let mut fake_json_servers = language_registry.register_fake_lsp(
848 "JSON",
849 FakeLspAdapter {
850 name: "the-json-language-server",
851 capabilities: lsp::ServerCapabilities {
852 completion_provider: Some(lsp::CompletionOptions {
853 trigger_characters: Some(vec![":".to_string()]),
854 ..Default::default()
855 }),
856 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
857 lsp::TextDocumentSyncOptions {
858 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
859 ..Default::default()
860 },
861 )),
862 ..Default::default()
863 },
864 ..Default::default()
865 },
866 );
867
868 // Open a buffer without an associated language server.
869 let (toml_buffer, _handle) = project
870 .update(cx, |project, cx| {
871 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
872 })
873 .await
874 .unwrap();
875
876 // Open a buffer with an associated language server before the language for it has been loaded.
877 let (rust_buffer, _handle2) = project
878 .update(cx, |project, cx| {
879 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
880 })
881 .await
882 .unwrap();
883 rust_buffer.update(cx, |buffer, _| {
884 assert_eq!(buffer.language().map(|l| l.name()), None);
885 });
886
887 // Now we add the languages to the project, and ensure they get assigned to all
888 // the relevant open buffers.
889 language_registry.add(json_lang());
890 language_registry.add(rust_lang());
891 cx.executor().run_until_parked();
892 rust_buffer.update(cx, |buffer, _| {
893 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
894 });
895
896 // A server is started up, and it is notified about Rust files.
897 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
898 assert_eq!(
899 fake_rust_server
900 .receive_notification::<lsp::notification::DidOpenTextDocument>()
901 .await
902 .text_document,
903 lsp::TextDocumentItem {
904 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
905 version: 0,
906 text: "const A: i32 = 1;".to_string(),
907 language_id: "rust".to_string(),
908 }
909 );
910
911 // The buffer is configured based on the language server's capabilities.
912 rust_buffer.update(cx, |buffer, _| {
913 assert_eq!(
914 buffer
915 .completion_triggers()
916 .iter()
917 .cloned()
918 .collect::<Vec<_>>(),
919 &[".".to_string(), "::".to_string()]
920 );
921 });
922 toml_buffer.update(cx, |buffer, _| {
923 assert!(buffer.completion_triggers().is_empty());
924 });
925
926 // Edit a buffer. The changes are reported to the language server.
927 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
928 assert_eq!(
929 fake_rust_server
930 .receive_notification::<lsp::notification::DidChangeTextDocument>()
931 .await
932 .text_document,
933 lsp::VersionedTextDocumentIdentifier::new(
934 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
935 1
936 )
937 );
938
939 // Open a third buffer with a different associated language server.
940 let (json_buffer, _json_handle) = project
941 .update(cx, |project, cx| {
942 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
943 })
944 .await
945 .unwrap();
946
947 // A json language server is started up and is only notified about the json buffer.
948 let mut fake_json_server = fake_json_servers.next().await.unwrap();
949 assert_eq!(
950 fake_json_server
951 .receive_notification::<lsp::notification::DidOpenTextDocument>()
952 .await
953 .text_document,
954 lsp::TextDocumentItem {
955 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
956 version: 0,
957 text: "{\"a\": 1}".to_string(),
958 language_id: "json".to_string(),
959 }
960 );
961
962 // This buffer is configured based on the second language server's
963 // capabilities.
964 json_buffer.update(cx, |buffer, _| {
965 assert_eq!(
966 buffer
967 .completion_triggers()
968 .iter()
969 .cloned()
970 .collect::<Vec<_>>(),
971 &[":".to_string()]
972 );
973 });
974
975 // When opening another buffer whose language server is already running,
976 // it is also configured based on the existing language server's capabilities.
977 let (rust_buffer2, _handle4) = project
978 .update(cx, |project, cx| {
979 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
980 })
981 .await
982 .unwrap();
983 rust_buffer2.update(cx, |buffer, _| {
984 assert_eq!(
985 buffer
986 .completion_triggers()
987 .iter()
988 .cloned()
989 .collect::<Vec<_>>(),
990 &[".".to_string(), "::".to_string()]
991 );
992 });
993
994 // Changes are reported only to servers matching the buffer's language.
995 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
996 rust_buffer2.update(cx, |buffer, cx| {
997 buffer.edit([(0..0, "let x = 1;")], None, cx)
998 });
999 assert_eq!(
1000 fake_rust_server
1001 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1002 .await
1003 .text_document,
1004 lsp::VersionedTextDocumentIdentifier::new(
1005 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1006 1
1007 )
1008 );
1009
1010 // Save notifications are reported to all servers.
1011 project
1012 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1013 .await
1014 .unwrap();
1015 assert_eq!(
1016 fake_rust_server
1017 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1018 .await
1019 .text_document,
1020 lsp::TextDocumentIdentifier::new(
1021 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1022 )
1023 );
1024 assert_eq!(
1025 fake_json_server
1026 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1027 .await
1028 .text_document,
1029 lsp::TextDocumentIdentifier::new(
1030 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1031 )
1032 );
1033
1034 // Renames are reported only to servers matching the buffer's language.
1035 fs.rename(
1036 Path::new(path!("/dir/test2.rs")),
1037 Path::new(path!("/dir/test3.rs")),
1038 Default::default(),
1039 )
1040 .await
1041 .unwrap();
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1048 );
1049 assert_eq!(
1050 fake_rust_server
1051 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1052 .await
1053 .text_document,
1054 lsp::TextDocumentItem {
1055 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1056 version: 0,
1057 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1058 language_id: "rust".to_string(),
1059 },
1060 );
1061
1062 rust_buffer2.update(cx, |buffer, cx| {
1063 buffer.update_diagnostics(
1064 LanguageServerId(0),
1065 DiagnosticSet::from_sorted_entries(
1066 vec![DiagnosticEntry {
1067 diagnostic: Default::default(),
1068 range: Anchor::MIN..Anchor::MAX,
1069 }],
1070 &buffer.snapshot(),
1071 ),
1072 cx,
1073 );
1074 assert_eq!(
1075 buffer
1076 .snapshot()
1077 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1078 .count(),
1079 1
1080 );
1081 });
1082
1083 // When the rename changes the extension of the file, the buffer gets closed on the old
1084 // language server and gets opened on the new one.
1085 fs.rename(
1086 Path::new(path!("/dir/test3.rs")),
1087 Path::new(path!("/dir/test3.json")),
1088 Default::default(),
1089 )
1090 .await
1091 .unwrap();
1092 assert_eq!(
1093 fake_rust_server
1094 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1098 );
1099 assert_eq!(
1100 fake_json_server
1101 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1102 .await
1103 .text_document,
1104 lsp::TextDocumentItem {
1105 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1106 version: 0,
1107 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1108 language_id: "json".to_string(),
1109 },
1110 );
1111
1112 // We clear the diagnostics, since the language has changed.
1113 rust_buffer2.update(cx, |buffer, _| {
1114 assert_eq!(
1115 buffer
1116 .snapshot()
1117 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1118 .count(),
1119 0
1120 );
1121 });
1122
1123 // The renamed file's version resets after changing language server.
1124 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1125 assert_eq!(
1126 fake_json_server
1127 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1128 .await
1129 .text_document,
1130 lsp::VersionedTextDocumentIdentifier::new(
1131 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1132 1
1133 )
1134 );
1135
1136 // Restart language servers
1137 project.update(cx, |project, cx| {
1138 project.restart_language_servers_for_buffers(
1139 vec![rust_buffer.clone(), json_buffer.clone()],
1140 HashSet::default(),
1141 cx,
1142 );
1143 });
1144
1145 let mut rust_shutdown_requests = fake_rust_server
1146 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1147 let mut json_shutdown_requests = fake_json_server
1148 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1149 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1150
1151 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1152 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1153
1154 // Ensure rust document is reopened in new rust language server
1155 assert_eq!(
1156 fake_rust_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 lsp::TextDocumentItem {
1161 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1162 version: 0,
1163 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1164 language_id: "rust".to_string(),
1165 }
1166 );
1167
1168 // Ensure json documents are reopened in new json language server
1169 assert_set_eq!(
1170 [
1171 fake_json_server
1172 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1173 .await
1174 .text_document,
1175 fake_json_server
1176 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1177 .await
1178 .text_document,
1179 ],
1180 [
1181 lsp::TextDocumentItem {
1182 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1183 version: 0,
1184 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1185 language_id: "json".to_string(),
1186 },
1187 lsp::TextDocumentItem {
1188 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1189 version: 0,
1190 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1191 language_id: "json".to_string(),
1192 }
1193 ]
1194 );
1195
1196 // Close notifications are reported only to servers matching the buffer's language.
1197 cx.update(|_| drop(_json_handle));
1198 let close_message = lsp::DidCloseTextDocumentParams {
1199 text_document: lsp::TextDocumentIdentifier::new(
1200 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1201 ),
1202 };
1203 assert_eq!(
1204 fake_json_server
1205 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1206 .await,
1207 close_message,
1208 );
1209}
1210
1211#[gpui::test]
1212async fn test_language_server_relative_path(cx: &mut gpui::TestAppContext) {
1213 init_test(cx);
1214
1215 let settings_json_contents = json!({
1216 "languages": {
1217 "Rust": {
1218 "language_servers": ["my_fake_lsp"]
1219 }
1220 },
1221 "lsp": {
1222 "my_fake_lsp": {
1223 "binary": {
1224 "path": path!("relative_path/to/my_fake_lsp_binary.exe").to_string(),
1225 }
1226 }
1227 },
1228 });
1229
1230 let fs = FakeFs::new(cx.executor());
1231 fs.insert_tree(
1232 path!("/the-root"),
1233 json!({
1234 ".zed": {
1235 "settings.json": settings_json_contents.to_string(),
1236 },
1237 "relative_path": {
1238 "to": {
1239 "my_fake_lsp.exe": "",
1240 },
1241 },
1242 "src": {
1243 "main.rs": "",
1244 }
1245 }),
1246 )
1247 .await;
1248
1249 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1250 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1251 language_registry.add(rust_lang());
1252
1253 let mut fake_rust_servers = language_registry.register_fake_lsp(
1254 "Rust",
1255 FakeLspAdapter {
1256 name: "my_fake_lsp",
1257 ..Default::default()
1258 },
1259 );
1260
1261 cx.run_until_parked();
1262
1263 // Start the language server by opening a buffer with a compatible file extension.
1264 project
1265 .update(cx, |project, cx| {
1266 project.open_local_buffer_with_lsp(path!("/the-root/src/main.rs"), cx)
1267 })
1268 .await
1269 .unwrap();
1270
1271 let lsp_path = fake_rust_servers.next().await.unwrap().binary.path;
1272 assert_eq!(
1273 lsp_path.to_string_lossy(),
1274 path!("/the-root/relative_path/to/my_fake_lsp_binary.exe"),
1275 );
1276}
1277
1278#[gpui::test]
1279async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1280 init_test(cx);
1281
1282 let fs = FakeFs::new(cx.executor());
1283 fs.insert_tree(
1284 path!("/the-root"),
1285 json!({
1286 ".gitignore": "target\n",
1287 "Cargo.lock": "",
1288 "src": {
1289 "a.rs": "",
1290 "b.rs": "",
1291 },
1292 "target": {
1293 "x": {
1294 "out": {
1295 "x.rs": ""
1296 }
1297 },
1298 "y": {
1299 "out": {
1300 "y.rs": "",
1301 }
1302 },
1303 "z": {
1304 "out": {
1305 "z.rs": ""
1306 }
1307 }
1308 }
1309 }),
1310 )
1311 .await;
1312 fs.insert_tree(
1313 path!("/the-registry"),
1314 json!({
1315 "dep1": {
1316 "src": {
1317 "dep1.rs": "",
1318 }
1319 },
1320 "dep2": {
1321 "src": {
1322 "dep2.rs": "",
1323 }
1324 },
1325 }),
1326 )
1327 .await;
1328 fs.insert_tree(
1329 path!("/the/stdlib"),
1330 json!({
1331 "LICENSE": "",
1332 "src": {
1333 "string.rs": "",
1334 }
1335 }),
1336 )
1337 .await;
1338
1339 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1340 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1341 (project.languages().clone(), project.lsp_store())
1342 });
1343 language_registry.add(rust_lang());
1344 let mut fake_servers = language_registry.register_fake_lsp(
1345 "Rust",
1346 FakeLspAdapter {
1347 name: "the-language-server",
1348 ..Default::default()
1349 },
1350 );
1351
1352 cx.executor().run_until_parked();
1353
1354 // Start the language server by opening a buffer with a compatible file extension.
1355 project
1356 .update(cx, |project, cx| {
1357 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1358 })
1359 .await
1360 .unwrap();
1361
1362 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1363 project.update(cx, |project, cx| {
1364 let worktree = project.worktrees(cx).next().unwrap();
1365 assert_eq!(
1366 worktree
1367 .read(cx)
1368 .snapshot()
1369 .entries(true, 0)
1370 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1371 .collect::<Vec<_>>(),
1372 &[
1373 ("", false),
1374 (".gitignore", false),
1375 ("Cargo.lock", false),
1376 ("src", false),
1377 ("src/a.rs", false),
1378 ("src/b.rs", false),
1379 ("target", true),
1380 ]
1381 );
1382 });
1383
1384 let prev_read_dir_count = fs.read_dir_call_count();
1385
1386 let fake_server = fake_servers.next().await.unwrap();
1387 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1388 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1389 id
1390 });
1391
1392 // Simulate jumping to a definition in a dependency outside of the worktree.
1393 let _out_of_worktree_buffer = project
1394 .update(cx, |project, cx| {
1395 project.open_local_buffer_via_lsp(
1396 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1397 server_id,
1398 cx,
1399 )
1400 })
1401 .await
1402 .unwrap();
1403
1404 // Keep track of the FS events reported to the language server.
1405 let file_changes = Arc::new(Mutex::new(Vec::new()));
1406 fake_server
1407 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1408 registrations: vec![lsp::Registration {
1409 id: Default::default(),
1410 method: "workspace/didChangeWatchedFiles".to_string(),
1411 register_options: serde_json::to_value(
1412 lsp::DidChangeWatchedFilesRegistrationOptions {
1413 watchers: vec![
1414 lsp::FileSystemWatcher {
1415 glob_pattern: lsp::GlobPattern::String(
1416 path!("/the-root/Cargo.toml").to_string(),
1417 ),
1418 kind: None,
1419 },
1420 lsp::FileSystemWatcher {
1421 glob_pattern: lsp::GlobPattern::String(
1422 path!("/the-root/src/*.{rs,c}").to_string(),
1423 ),
1424 kind: None,
1425 },
1426 lsp::FileSystemWatcher {
1427 glob_pattern: lsp::GlobPattern::String(
1428 path!("/the-root/target/y/**/*.rs").to_string(),
1429 ),
1430 kind: None,
1431 },
1432 lsp::FileSystemWatcher {
1433 glob_pattern: lsp::GlobPattern::String(
1434 path!("/the/stdlib/src/**/*.rs").to_string(),
1435 ),
1436 kind: None,
1437 },
1438 lsp::FileSystemWatcher {
1439 glob_pattern: lsp::GlobPattern::String(
1440 path!("**/Cargo.lock").to_string(),
1441 ),
1442 kind: None,
1443 },
1444 ],
1445 },
1446 )
1447 .ok(),
1448 }],
1449 })
1450 .await
1451 .into_response()
1452 .unwrap();
1453 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1454 let file_changes = file_changes.clone();
1455 move |params, _| {
1456 let mut file_changes = file_changes.lock();
1457 file_changes.extend(params.changes);
1458 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1459 }
1460 });
1461
1462 cx.executor().run_until_parked();
1463 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1464 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1465
1466 let mut new_watched_paths = fs.watched_paths();
1467 new_watched_paths.retain(|path| {
1468 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1469 });
1470 assert_eq!(
1471 &new_watched_paths,
1472 &[
1473 Path::new(path!("/the-root")),
1474 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1475 Path::new(path!("/the/stdlib/src"))
1476 ]
1477 );
1478
1479 // Now the language server has asked us to watch an ignored directory path,
1480 // so we recursively load it.
1481 project.update(cx, |project, cx| {
1482 let worktree = project.visible_worktrees(cx).next().unwrap();
1483 assert_eq!(
1484 worktree
1485 .read(cx)
1486 .snapshot()
1487 .entries(true, 0)
1488 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1489 .collect::<Vec<_>>(),
1490 &[
1491 ("", false),
1492 (".gitignore", false),
1493 ("Cargo.lock", false),
1494 ("src", false),
1495 ("src/a.rs", false),
1496 ("src/b.rs", false),
1497 ("target", true),
1498 ("target/x", true),
1499 ("target/y", true),
1500 ("target/y/out", true),
1501 ("target/y/out/y.rs", true),
1502 ("target/z", true),
1503 ]
1504 );
1505 });
1506
1507 // Perform some file system mutations, two of which match the watched patterns,
1508 // and one of which does not.
1509 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1510 .await
1511 .unwrap();
1512 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1513 .await
1514 .unwrap();
1515 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1516 .await
1517 .unwrap();
1518 fs.create_file(
1519 path!("/the-root/target/x/out/x2.rs").as_ref(),
1520 Default::default(),
1521 )
1522 .await
1523 .unwrap();
1524 fs.create_file(
1525 path!("/the-root/target/y/out/y2.rs").as_ref(),
1526 Default::default(),
1527 )
1528 .await
1529 .unwrap();
1530 fs.save(
1531 path!("/the-root/Cargo.lock").as_ref(),
1532 &"".into(),
1533 Default::default(),
1534 )
1535 .await
1536 .unwrap();
1537 fs.save(
1538 path!("/the-stdlib/LICENSE").as_ref(),
1539 &"".into(),
1540 Default::default(),
1541 )
1542 .await
1543 .unwrap();
1544 fs.save(
1545 path!("/the/stdlib/src/string.rs").as_ref(),
1546 &"".into(),
1547 Default::default(),
1548 )
1549 .await
1550 .unwrap();
1551
1552 // The language server receives events for the FS mutations that match its watch patterns.
1553 cx.executor().run_until_parked();
1554 assert_eq!(
1555 &*file_changes.lock(),
1556 &[
1557 lsp::FileEvent {
1558 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1559 typ: lsp::FileChangeType::CHANGED,
1560 },
1561 lsp::FileEvent {
1562 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1563 typ: lsp::FileChangeType::DELETED,
1564 },
1565 lsp::FileEvent {
1566 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1567 typ: lsp::FileChangeType::CREATED,
1568 },
1569 lsp::FileEvent {
1570 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1571 typ: lsp::FileChangeType::CREATED,
1572 },
1573 lsp::FileEvent {
1574 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1575 typ: lsp::FileChangeType::CHANGED,
1576 },
1577 ]
1578 );
1579}
1580
1581#[gpui::test]
1582async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1583 init_test(cx);
1584
1585 let fs = FakeFs::new(cx.executor());
1586 fs.insert_tree(
1587 path!("/dir"),
1588 json!({
1589 "a.rs": "let a = 1;",
1590 "b.rs": "let b = 2;"
1591 }),
1592 )
1593 .await;
1594
1595 let project = Project::test(
1596 fs,
1597 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1598 cx,
1599 )
1600 .await;
1601 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1602
1603 let buffer_a = project
1604 .update(cx, |project, cx| {
1605 project.open_local_buffer(path!("/dir/a.rs"), cx)
1606 })
1607 .await
1608 .unwrap();
1609 let buffer_b = project
1610 .update(cx, |project, cx| {
1611 project.open_local_buffer(path!("/dir/b.rs"), cx)
1612 })
1613 .await
1614 .unwrap();
1615
1616 lsp_store.update(cx, |lsp_store, cx| {
1617 lsp_store
1618 .update_diagnostics(
1619 LanguageServerId(0),
1620 lsp::PublishDiagnosticsParams {
1621 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1622 version: None,
1623 diagnostics: vec![lsp::Diagnostic {
1624 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1625 severity: Some(lsp::DiagnosticSeverity::ERROR),
1626 message: "error 1".to_string(),
1627 ..Default::default()
1628 }],
1629 },
1630 None,
1631 DiagnosticSourceKind::Pushed,
1632 &[],
1633 cx,
1634 )
1635 .unwrap();
1636 lsp_store
1637 .update_diagnostics(
1638 LanguageServerId(0),
1639 lsp::PublishDiagnosticsParams {
1640 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1641 version: None,
1642 diagnostics: vec![lsp::Diagnostic {
1643 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1644 severity: Some(DiagnosticSeverity::WARNING),
1645 message: "error 2".to_string(),
1646 ..Default::default()
1647 }],
1648 },
1649 None,
1650 DiagnosticSourceKind::Pushed,
1651 &[],
1652 cx,
1653 )
1654 .unwrap();
1655 });
1656
1657 buffer_a.update(cx, |buffer, _| {
1658 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1659 assert_eq!(
1660 chunks
1661 .iter()
1662 .map(|(s, d)| (s.as_str(), *d))
1663 .collect::<Vec<_>>(),
1664 &[
1665 ("let ", None),
1666 ("a", Some(DiagnosticSeverity::ERROR)),
1667 (" = 1;", None),
1668 ]
1669 );
1670 });
1671 buffer_b.update(cx, |buffer, _| {
1672 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1673 assert_eq!(
1674 chunks
1675 .iter()
1676 .map(|(s, d)| (s.as_str(), *d))
1677 .collect::<Vec<_>>(),
1678 &[
1679 ("let ", None),
1680 ("b", Some(DiagnosticSeverity::WARNING)),
1681 (" = 2;", None),
1682 ]
1683 );
1684 });
1685}
1686
1687#[gpui::test]
1688async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1689 init_test(cx);
1690
1691 let fs = FakeFs::new(cx.executor());
1692 fs.insert_tree(
1693 path!("/root"),
1694 json!({
1695 "dir": {
1696 ".git": {
1697 "HEAD": "ref: refs/heads/main",
1698 },
1699 ".gitignore": "b.rs",
1700 "a.rs": "let a = 1;",
1701 "b.rs": "let b = 2;",
1702 },
1703 "other.rs": "let b = c;"
1704 }),
1705 )
1706 .await;
1707
1708 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1709 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1710 let (worktree, _) = project
1711 .update(cx, |project, cx| {
1712 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1713 })
1714 .await
1715 .unwrap();
1716 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1717
1718 let (worktree, _) = project
1719 .update(cx, |project, cx| {
1720 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1721 })
1722 .await
1723 .unwrap();
1724 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1725
1726 let server_id = LanguageServerId(0);
1727 lsp_store.update(cx, |lsp_store, cx| {
1728 lsp_store
1729 .update_diagnostics(
1730 server_id,
1731 lsp::PublishDiagnosticsParams {
1732 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1733 version: None,
1734 diagnostics: vec![lsp::Diagnostic {
1735 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1736 severity: Some(lsp::DiagnosticSeverity::ERROR),
1737 message: "unused variable 'b'".to_string(),
1738 ..Default::default()
1739 }],
1740 },
1741 None,
1742 DiagnosticSourceKind::Pushed,
1743 &[],
1744 cx,
1745 )
1746 .unwrap();
1747 lsp_store
1748 .update_diagnostics(
1749 server_id,
1750 lsp::PublishDiagnosticsParams {
1751 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1752 version: None,
1753 diagnostics: vec![lsp::Diagnostic {
1754 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1755 severity: Some(lsp::DiagnosticSeverity::ERROR),
1756 message: "unknown variable 'c'".to_string(),
1757 ..Default::default()
1758 }],
1759 },
1760 None,
1761 DiagnosticSourceKind::Pushed,
1762 &[],
1763 cx,
1764 )
1765 .unwrap();
1766 });
1767
1768 let main_ignored_buffer = project
1769 .update(cx, |project, cx| {
1770 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1771 })
1772 .await
1773 .unwrap();
1774 main_ignored_buffer.update(cx, |buffer, _| {
1775 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1776 assert_eq!(
1777 chunks
1778 .iter()
1779 .map(|(s, d)| (s.as_str(), *d))
1780 .collect::<Vec<_>>(),
1781 &[
1782 ("let ", None),
1783 ("b", Some(DiagnosticSeverity::ERROR)),
1784 (" = 2;", None),
1785 ],
1786 "Gigitnored buffers should still get in-buffer diagnostics",
1787 );
1788 });
1789 let other_buffer = project
1790 .update(cx, |project, cx| {
1791 project.open_buffer((other_worktree_id, rel_path("")), cx)
1792 })
1793 .await
1794 .unwrap();
1795 other_buffer.update(cx, |buffer, _| {
1796 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1797 assert_eq!(
1798 chunks
1799 .iter()
1800 .map(|(s, d)| (s.as_str(), *d))
1801 .collect::<Vec<_>>(),
1802 &[
1803 ("let b = ", None),
1804 ("c", Some(DiagnosticSeverity::ERROR)),
1805 (";", None),
1806 ],
1807 "Buffers from hidden projects should still get in-buffer diagnostics"
1808 );
1809 });
1810
1811 project.update(cx, |project, cx| {
1812 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1813 assert_eq!(
1814 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1815 vec![(
1816 ProjectPath {
1817 worktree_id: main_worktree_id,
1818 path: rel_path("b.rs").into(),
1819 },
1820 server_id,
1821 DiagnosticSummary {
1822 error_count: 1,
1823 warning_count: 0,
1824 }
1825 )]
1826 );
1827 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1828 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1829 });
1830}
1831
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake adapter advertises for disk-based diagnostics;
    // progress reported under this token drives the DiskBasedDiagnostics* events.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Subscribe to project events before the server comes up so the full
    // event sequence below is observed in order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token surfaces a
    // DiskBasedDiagnosticsStarted event for that server.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing a diagnostic for `a.rs` emits DiagnosticsUpdated naming
    // that path, even though the buffer for `a.rs` is not open yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress on the same token finishes the disk-based pass.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The diagnostic published before the buffer was opened is now visible
    // in the freshly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical (empty) publish must produce no further event:
    // after settling, the event stream is still pending.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1967
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token under which the fake server reports disk-based diagnostics progress.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The restart should remove server 0 and add a fresh server 1.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server…
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    // …and the new server's progress starts a fresh disk-based pass.
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2067
2068#[gpui::test]
2069async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2070 init_test(cx);
2071
2072 let fs = FakeFs::new(cx.executor());
2073 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2074
2075 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2076
2077 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2078 language_registry.add(rust_lang());
2079 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2080
2081 let (buffer, _) = project
2082 .update(cx, |project, cx| {
2083 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2084 })
2085 .await
2086 .unwrap();
2087
2088 // Publish diagnostics
2089 let fake_server = fake_servers.next().await.unwrap();
2090 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2091 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2092 version: None,
2093 diagnostics: vec![lsp::Diagnostic {
2094 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2095 severity: Some(lsp::DiagnosticSeverity::ERROR),
2096 message: "the message".to_string(),
2097 ..Default::default()
2098 }],
2099 });
2100
2101 cx.executor().run_until_parked();
2102 buffer.update(cx, |buffer, _| {
2103 assert_eq!(
2104 buffer
2105 .snapshot()
2106 .diagnostics_in_range::<_, usize>(0..1, false)
2107 .map(|entry| entry.diagnostic.message.clone())
2108 .collect::<Vec<_>>(),
2109 ["the message".to_string()]
2110 );
2111 });
2112 project.update(cx, |project, cx| {
2113 assert_eq!(
2114 project.diagnostic_summary(false, cx),
2115 DiagnosticSummary {
2116 error_count: 1,
2117 warning_count: 0,
2118 }
2119 );
2120 });
2121
2122 project.update(cx, |project, cx| {
2123 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2124 });
2125
2126 // The diagnostics are cleared.
2127 cx.executor().run_until_parked();
2128 buffer.update(cx, |buffer, _| {
2129 assert_eq!(
2130 buffer
2131 .snapshot()
2132 .diagnostics_in_range::<_, usize>(0..1, false)
2133 .map(|entry| entry.diagnostic.message.clone())
2134 .collect::<Vec<_>>(),
2135 Vec::<String>::new(),
2136 );
2137 });
2138 project.update(cx, |project, cx| {
2139 assert_eq!(
2140 project.diagnostic_summary(false, cx),
2141 DiagnosticSummary {
2142 error_count: 0,
2143 warning_count: 0,
2144 }
2145 );
2146 });
2147}
2148
2149#[gpui::test]
2150async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2151 init_test(cx);
2152
2153 let fs = FakeFs::new(cx.executor());
2154 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2155
2156 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2157 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2158
2159 language_registry.add(rust_lang());
2160 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2161
2162 let (buffer, _handle) = project
2163 .update(cx, |project, cx| {
2164 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2165 })
2166 .await
2167 .unwrap();
2168
2169 // Before restarting the server, report diagnostics with an unknown buffer version.
2170 let fake_server = fake_servers.next().await.unwrap();
2171 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2172 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2173 version: Some(10000),
2174 diagnostics: Vec::new(),
2175 });
2176 cx.executor().run_until_parked();
2177 project.update(cx, |project, cx| {
2178 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2179 });
2180
2181 let mut fake_server = fake_servers.next().await.unwrap();
2182 let notification = fake_server
2183 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2184 .await
2185 .text_document;
2186 assert_eq!(notification.version, 0);
2187}
2188
2189#[gpui::test]
2190async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2191 init_test(cx);
2192
2193 let progress_token = "the-progress-token";
2194
2195 let fs = FakeFs::new(cx.executor());
2196 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2197
2198 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2199
2200 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2201 language_registry.add(rust_lang());
2202 let mut fake_servers = language_registry.register_fake_lsp(
2203 "Rust",
2204 FakeLspAdapter {
2205 name: "the-language-server",
2206 disk_based_diagnostics_sources: vec!["disk".into()],
2207 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2208 ..Default::default()
2209 },
2210 );
2211
2212 let (buffer, _handle) = project
2213 .update(cx, |project, cx| {
2214 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2215 })
2216 .await
2217 .unwrap();
2218
2219 // Simulate diagnostics starting to update.
2220 let mut fake_server = fake_servers.next().await.unwrap();
2221 fake_server
2222 .start_progress_with(
2223 "another-token",
2224 lsp::WorkDoneProgressBegin {
2225 cancellable: Some(false),
2226 ..Default::default()
2227 },
2228 )
2229 .await;
2230 fake_server
2231 .start_progress_with(
2232 progress_token,
2233 lsp::WorkDoneProgressBegin {
2234 cancellable: Some(true),
2235 ..Default::default()
2236 },
2237 )
2238 .await;
2239 cx.executor().run_until_parked();
2240
2241 project.update(cx, |project, cx| {
2242 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2243 });
2244
2245 let cancel_notification = fake_server
2246 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2247 .await;
2248 assert_eq!(
2249 cancel_notification.token,
2250 NumberOrString::String(progress_token.into())
2251 );
2252}
2253
// Verifies that toggling `enable_language_server` per-language in user settings
// stops and restarts only the affected language server: disabling Rust exits the
// Rust server, re-enabling it starts a fresh server (which re-opens the buffer),
// and disabling JavaScript exits that server independently.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer should start the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The restarted Rust server re-opens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2371
// Verifies that LSP diagnostics published against an older document version are
// transformed through subsequent buffer edits: ranges are shifted to their
// current locations, overlapping diagnostics highlight correctly, and
// diagnostics arriving for an out-of-date version still land in the right spot.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows shifted by 2 because of the "\n\n" inserted at the top above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe (error) wins;
        // the remainder of the warning range stays warning-highlighted.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2663
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// extended forward to cover the following character, or backward to the
// preceding character when it sits at the end of a line.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =\n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Push two diagnostics whose start and end positions are identical
    // (zero-width), one mid-line and one at end-of-line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2739
// Verifies that diagnostics published by two different language servers for the
// same file are both counted in the project-wide diagnostic summary rather than
// one server's entries replacing the other's.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same path and same range, but two distinct server ids.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' errors contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2800
// Verifies that `edits_from_lsp` correctly rebases edits that a language server
// computed against an older document version: the buffer is edited after the
// server's snapshot, and the resulting edits must still apply cleanly to the
// current buffer contents.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server "computed its edits" against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP edits below use coordinates in the OLD document version; passing
    // `lsp_document_version` lets the store translate them forward.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2955
// Verifies that `edits_from_lsp` collapses a "large diff for a small change"
// (the delete-and-reinsert pattern rust-analyzer emits for merge-imports) into
// a minimal set of edits touching only the lines that actually change.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above should be reduced to two minimal edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3066
// Verifies that `edits_from_lsp` tolerates a spec-violating edit pair — an
// insertion at the same position *after* a replacement — and still produces
// the intended combined result.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3122
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unordered
// edits, an inverted range (end before start), and a range that extends past
// the end of the document are all normalized into a valid, minimal edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Range end (line 99) is past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3229
3230fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3231 buffer: &Buffer,
3232 range: Range<T>,
3233) -> Vec<(String, Option<DiagnosticSeverity>)> {
3234 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3235 for chunk in buffer.snapshot().chunks(range, true) {
3236 if chunks
3237 .last()
3238 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3239 {
3240 chunks.last_mut().unwrap().0.push_str(chunk.text);
3241 } else {
3242 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3243 }
3244 }
3245 chunks
3246}
3247
// Verifies go-to-definition into a file outside the project: the target buffer
// is opened in an invisible worktree that exists only while the definition is
// held, and is dropped once the definition is released. Also checks that no
// second language server is started for the target file.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location inside a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs lives in a new, invisible worktree while the definition is held.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Returns each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3346
// Verifies that when a completion item carries a `text_edit`, its range and
// new text take precedence over the item's `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item supplies all three sources of text; only `text_edit` should win.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3429
// Verifies `itemDefaults.editRange` handling in completion lists: when a
// completion item carries no `text_edit` of its own, the list-level default
// edit range is used, combined with the item's `text_edit_text` when present,
// or with its label otherwise.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Single empty TypeScript file on an in-memory file system.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript language server advertising completions
    // triggered by ".".
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Start the request first; the handler installed below serves it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // List-level default edit_range covers the trailing "fqn" (3 chars).
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` combined with the default edit_range forms the edit.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With a default edit_range present, the label (not insert_text) is
        // used as the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3566
// Verifies fallback behavior when the server supplies no edit range at all
// (no per-item `text_edit`, no list-level default): Zed uses `insert_text`
// (or the label) and derives the replace range from the word around the
// cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Single empty TypeScript file on an in-memory file system.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // insert_text wins over the label, and the replace range covers the word
    // before the cursor ("fqn").
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote, inside the string literal.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // With nothing else to go on, the label is the new text and the range is
    // the word around the cursor ("cmp").
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3672
// Completion insert text containing bare `\r` or `\r\n` line endings must be
// normalized to `\n` before being applied to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Start the completion request; the handler below serves it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Serve one completion whose insert_text mixes "\r" and "\r\n" endings.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    // Both line-ending styles are normalized to plain "\n".
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3740
// Exercises the code-action flow where an action carries a command instead of
// edits: resolving the action populates the command, executing it makes the
// fake server send `workspace/applyEdit` back to the client, and those edits
// end up in the project transaction returned by `apply_code_action`.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake TypeScript server advertising resolvable code actions and an
    // executable "_the/command".
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action; this drives the resolve + executeCommand
    // round-trips handled below.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client request inserting "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3882
// Renaming a file into a directory hierarchy that does not exist yet must
// create the intermediate directories and preserve the file's contents; a
// second rename into an already-existing directory must also succeed.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Grab the worktree and the entry id of the file to be renamed.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file three directory levels deep; none of the dirs exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-resolve the entry id at its new location before the second rename.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3990
3991#[gpui::test(iterations = 10)]
3992async fn test_save_file(cx: &mut gpui::TestAppContext) {
3993 init_test(cx);
3994
3995 let fs = FakeFs::new(cx.executor());
3996 fs.insert_tree(
3997 path!("/dir"),
3998 json!({
3999 "file1": "the old contents",
4000 }),
4001 )
4002 .await;
4003
4004 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4005 let buffer = project
4006 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4007 .await
4008 .unwrap();
4009 buffer.update(cx, |buffer, cx| {
4010 assert_eq!(buffer.text(), "the old contents");
4011 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4012 });
4013
4014 project
4015 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4016 .await
4017 .unwrap();
4018
4019 let new_text = fs
4020 .load(Path::new(path!("/dir/file1")))
4021 .await
4022 .unwrap()
4023 .replace("\r\n", "\n");
4024 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4025}
4026
// Saving an untitled buffer under a name with a recognized extension must
// spawn the matching language server and register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    // Empty project directory; no Rust files exist yet.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An unsaved buffer has no file extension yet, so no server is attached.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a .rs file, which determines its language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now served by the freshly-spawned language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4106
4107#[gpui::test(iterations = 30)]
4108async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4109 init_test(cx);
4110
4111 let fs = FakeFs::new(cx.executor());
4112 fs.insert_tree(
4113 path!("/dir"),
4114 json!({
4115 "file1": "the original contents",
4116 }),
4117 )
4118 .await;
4119
4120 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4121 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4122 let buffer = project
4123 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4124 .await
4125 .unwrap();
4126
4127 // Simulate buffer diffs being slow, so that they don't complete before
4128 // the next file change occurs.
4129 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4130
4131 // Change the buffer's file on disk, and then wait for the file change
4132 // to be detected by the worktree, so that the buffer starts reloading.
4133 fs.save(
4134 path!("/dir/file1").as_ref(),
4135 &"the first contents".into(),
4136 Default::default(),
4137 )
4138 .await
4139 .unwrap();
4140 worktree.next_event(cx).await;
4141
4142 // Change the buffer's file again. Depending on the random seed, the
4143 // previous file change may still be in progress.
4144 fs.save(
4145 path!("/dir/file1").as_ref(),
4146 &"the second contents".into(),
4147 Default::default(),
4148 )
4149 .await
4150 .unwrap();
4151 worktree.next_event(cx).await;
4152
4153 cx.executor().run_until_parked();
4154 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4155 buffer.read_with(cx, |buffer, _| {
4156 assert_eq!(buffer.text(), on_disk_text);
4157 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4158 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4159 });
4160}
4161
4162#[gpui::test(iterations = 30)]
4163async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4164 init_test(cx);
4165
4166 let fs = FakeFs::new(cx.executor());
4167 fs.insert_tree(
4168 path!("/dir"),
4169 json!({
4170 "file1": "the original contents",
4171 }),
4172 )
4173 .await;
4174
4175 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4176 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4177 let buffer = project
4178 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4179 .await
4180 .unwrap();
4181
4182 // Simulate buffer diffs being slow, so that they don't complete before
4183 // the next file change occurs.
4184 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4185
4186 // Change the buffer's file on disk, and then wait for the file change
4187 // to be detected by the worktree, so that the buffer starts reloading.
4188 fs.save(
4189 path!("/dir/file1").as_ref(),
4190 &"the first contents".into(),
4191 Default::default(),
4192 )
4193 .await
4194 .unwrap();
4195 worktree.next_event(cx).await;
4196
4197 cx.executor()
4198 .spawn(cx.executor().simulate_random_delay())
4199 .await;
4200
4201 // Perform a noop edit, causing the buffer's version to increase.
4202 buffer.update(cx, |buffer, cx| {
4203 buffer.edit([(0..0, " ")], None, cx);
4204 buffer.undo(cx);
4205 });
4206
4207 cx.executor().run_until_parked();
4208 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4209 buffer.read_with(cx, |buffer, _| {
4210 let buffer_text = buffer.text();
4211 if buffer_text == on_disk_text {
4212 assert!(
4213 !buffer.is_dirty() && !buffer.has_conflict(),
4214 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4215 );
4216 }
4217 // If the file change occurred while the buffer was processing the first
4218 // change, the buffer will be in a conflicting state.
4219 else {
4220 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4221 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4222 }
4223 });
4224}
4225
4226#[gpui::test]
4227async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4228 init_test(cx);
4229
4230 let fs = FakeFs::new(cx.executor());
4231 fs.insert_tree(
4232 path!("/dir"),
4233 json!({
4234 "file1": "the old contents",
4235 }),
4236 )
4237 .await;
4238
4239 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4240 let buffer = project
4241 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4242 .await
4243 .unwrap();
4244 buffer.update(cx, |buffer, cx| {
4245 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4246 });
4247
4248 project
4249 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4250 .await
4251 .unwrap();
4252
4253 let new_text = fs
4254 .load(Path::new(path!("/dir/file1")))
4255 .await
4256 .unwrap()
4257 .replace("\r\n", "\n");
4258 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4259}
4260
// "Save as" on an untitled buffer: the buffer becomes associated with the new
// file, stops being dirty, picks up the language matching its new extension,
// and a subsequent open of that path returns the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // NOTE(review): this test uses bare "/dir" literals rather than the
    // path!() macro used by neighboring tests — presumably fine for FakeFs,
    // but worth confirming on Windows.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // Create an untitled buffer; with no file it defaults to Plain Text.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    // After saving, the buffer is clean and re-detects its language from the
    // new ".rs" extension.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must yield the same buffer entity, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4314
4315#[gpui::test]
4316async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4317 init_test(cx);
4318
4319 let fs = FakeFs::new(cx.executor());
4320 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4321
4322 fs.insert_tree(
4323 path!("/dir"),
4324 json!({
4325 "data_a.txt": "data about a"
4326 }),
4327 )
4328 .await;
4329
4330 let buffer = project
4331 .update(cx, |project, cx| {
4332 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4333 })
4334 .await
4335 .unwrap();
4336
4337 buffer.update(cx, |buffer, cx| {
4338 buffer.edit([(11..12, "b")], None, cx);
4339 });
4340
4341 // Save buffer's contents as a new file and confirm that the buffer's now
4342 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4343 // file associated with the buffer has now been updated to `data_b.txt`
4344 project
4345 .update(cx, |project, cx| {
4346 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4347 let new_path = ProjectPath {
4348 worktree_id,
4349 path: rel_path("data_b.txt").into(),
4350 };
4351
4352 project.save_buffer_as(buffer.clone(), new_path, cx)
4353 })
4354 .await
4355 .unwrap();
4356
4357 buffer.update(cx, |buffer, cx| {
4358 assert_eq!(
4359 buffer.file().unwrap().full_path(cx),
4360 Path::new("dir/data_b.txt")
4361 )
4362 });
4363
4364 // Open the original `data_a.txt` file, confirming that its contents are
4365 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4366 let original_buffer = project
4367 .update(cx, |project, cx| {
4368 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4369 })
4370 .await
4371 .unwrap();
4372
4373 original_buffer.update(cx, |buffer, cx| {
4374 assert_eq!(buffer.text(), "data about a");
4375 assert_eq!(
4376 buffer.file().unwrap().full_path(cx),
4377 Path::new("dir/data_a.txt")
4378 )
4379 });
4380}
4381
// End-to-end check that FS rescans keep worktree entries, entry ids, and open
// buffers consistent after renames/deletions on a real file system, and that
// an observing remote worktree converges to the same state when the recorded
// updates are replayed into it.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    // Real (non-fake) file system rooted in a temp dir, so actual FS events
    // drive the rescan.
    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a worktree-relative path.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits so they can be replayed
    // into the remote copy below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    // All buffers start out clean.
    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The rescanned worktree should reflect all renames and deletions.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids must survive renames (same id, new path).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths…
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // …while the deleted file keeps its last known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4549
4550#[gpui::test(iterations = 10)]
4551async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4552 init_test(cx);
4553
4554 let fs = FakeFs::new(cx.executor());
4555 fs.insert_tree(
4556 path!("/dir"),
4557 json!({
4558 "a": {
4559 "file1": "",
4560 }
4561 }),
4562 )
4563 .await;
4564
4565 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4566 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4567 let tree_id = tree.update(cx, |tree, _| tree.id());
4568
4569 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4570 project.update(cx, |project, cx| {
4571 let tree = project.worktrees(cx).next().unwrap();
4572 tree.read(cx)
4573 .entry_for_path(rel_path(path))
4574 .unwrap_or_else(|| panic!("no entry for path {}", path))
4575 .id
4576 })
4577 };
4578
4579 let dir_id = id_for_path("a", cx);
4580 let file_id = id_for_path("a/file1", cx);
4581 let buffer = project
4582 .update(cx, |p, cx| {
4583 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4584 })
4585 .await
4586 .unwrap();
4587 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4588
4589 project
4590 .update(cx, |project, cx| {
4591 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4592 })
4593 .unwrap()
4594 .await
4595 .into_included()
4596 .unwrap();
4597 cx.executor().run_until_parked();
4598
4599 assert_eq!(id_for_path("b", cx), dir_id);
4600 assert_eq!(id_for_path("b/file1", cx), file_id);
4601 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4602}
4603
4604#[gpui::test]
4605async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4606 init_test(cx);
4607
4608 let fs = FakeFs::new(cx.executor());
4609 fs.insert_tree(
4610 "/dir",
4611 json!({
4612 "a.txt": "a-contents",
4613 "b.txt": "b-contents",
4614 }),
4615 )
4616 .await;
4617
4618 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4619
4620 // Spawn multiple tasks to open paths, repeating some paths.
4621 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4622 (
4623 p.open_local_buffer("/dir/a.txt", cx),
4624 p.open_local_buffer("/dir/b.txt", cx),
4625 p.open_local_buffer("/dir/a.txt", cx),
4626 )
4627 });
4628
4629 let buffer_a_1 = buffer_a_1.await.unwrap();
4630 let buffer_a_2 = buffer_a_2.await.unwrap();
4631 let buffer_b = buffer_b.await.unwrap();
4632 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4633 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4634
4635 // There is only one buffer per path.
4636 let buffer_a_id = buffer_a_1.entity_id();
4637 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4638
4639 // Open the same path again while it is still open.
4640 drop(buffer_a_1);
4641 let buffer_a_3 = project
4642 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4643 .await
4644 .unwrap();
4645
4646 // There's still only one buffer per path.
4647 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4648}
4649
// Exercises the buffer dirty-state machine: an edit marks the buffer dirty and
// emits Edited + DirtyChanged; saving clears the flag and emits Saved; editing
// the text back to its last-saved state clears the flag again. Also covers the
// interaction with on-disk deletion: a clean buffer whose file is deleted stays
// clean, becomes dirty when edited, and becomes clean once emptied (file gone),
// while a buffer that was already dirty before deletion remains dirty.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Records every non-operation buffer event so each phase below can assert
    // on exactly which events fired.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                // Operation events fire on every edit; they are not relevant
                // to dirty-state tracking, so drop them.
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version at the file's
        // current on-disk mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: DirtyChanged fires only on the first of the two edits — the
        // buffer was already dirty when the second edit arrived.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    // Dirty the buffer first, then delete the file out from under it.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4831
4832#[gpui::test]
4833async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4834 init_test(cx);
4835
4836 let (initial_contents, initial_offsets) =
4837 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
4838 let fs = FakeFs::new(cx.executor());
4839 fs.insert_tree(
4840 path!("/dir"),
4841 json!({
4842 "the-file": initial_contents,
4843 }),
4844 )
4845 .await;
4846 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4847 let buffer = project
4848 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
4849 .await
4850 .unwrap();
4851
4852 let anchors = initial_offsets
4853 .iter()
4854 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
4855 .collect::<Vec<_>>();
4856
4857 // Change the file on disk, adding two new lines of text, and removing
4858 // one line.
4859 buffer.update(cx, |buffer, _| {
4860 assert!(!buffer.is_dirty());
4861 assert!(!buffer.has_conflict());
4862 });
4863
4864 let (new_contents, new_offsets) =
4865 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
4866 fs.save(
4867 path!("/dir/the-file").as_ref(),
4868 &new_contents.as_str().into(),
4869 LineEnding::Unix,
4870 )
4871 .await
4872 .unwrap();
4873
4874 // Because the buffer was not modified, it is reloaded from disk. Its
4875 // contents are edited according to the diff between the old and new
4876 // file contents.
4877 cx.executor().run_until_parked();
4878 buffer.update(cx, |buffer, _| {
4879 assert_eq!(buffer.text(), new_contents);
4880 assert!(!buffer.is_dirty());
4881 assert!(!buffer.has_conflict());
4882
4883 let anchor_offsets = anchors
4884 .iter()
4885 .map(|anchor| anchor.to_offset(&*buffer))
4886 .collect::<Vec<_>>();
4887 assert_eq!(anchor_offsets, new_offsets);
4888 });
4889
4890 // Modify the buffer
4891 buffer.update(cx, |buffer, cx| {
4892 buffer.edit([(0..0, " ")], None, cx);
4893 assert!(buffer.is_dirty());
4894 assert!(!buffer.has_conflict());
4895 });
4896
4897 // Change the file on disk again, adding blank lines to the beginning.
4898 fs.save(
4899 path!("/dir/the-file").as_ref(),
4900 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
4901 LineEnding::Unix,
4902 )
4903 .await
4904 .unwrap();
4905
4906 // Because the buffer is modified, it doesn't reload from disk, but is
4907 // marked as having a conflict.
4908 cx.executor().run_until_parked();
4909 buffer.update(cx, |buffer, _| {
4910 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
4911 assert!(buffer.has_conflict());
4912 });
4913}
4914
4915#[gpui::test]
4916async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4917 init_test(cx);
4918
4919 let fs = FakeFs::new(cx.executor());
4920 fs.insert_tree(
4921 path!("/dir"),
4922 json!({
4923 "file1": "a\nb\nc\n",
4924 "file2": "one\r\ntwo\r\nthree\r\n",
4925 }),
4926 )
4927 .await;
4928
4929 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4930 let buffer1 = project
4931 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4932 .await
4933 .unwrap();
4934 let buffer2 = project
4935 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4936 .await
4937 .unwrap();
4938
4939 buffer1.update(cx, |buffer, _| {
4940 assert_eq!(buffer.text(), "a\nb\nc\n");
4941 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4942 });
4943 buffer2.update(cx, |buffer, _| {
4944 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4945 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4946 });
4947
4948 // Change a file's line endings on disk from unix to windows. The buffer's
4949 // state updates correctly.
4950 fs.save(
4951 path!("/dir/file1").as_ref(),
4952 &"aaa\nb\nc\n".into(),
4953 LineEnding::Windows,
4954 )
4955 .await
4956 .unwrap();
4957 cx.executor().run_until_parked();
4958 buffer1.update(cx, |buffer, _| {
4959 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4960 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4961 });
4962
4963 // Save a file with windows line endings. The file is written correctly.
4964 buffer2.update(cx, |buffer, cx| {
4965 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4966 });
4967 project
4968 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4969 .await
4970 .unwrap();
4971 assert_eq!(
4972 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4973 "one\r\ntwo\r\nthree\r\nfour\r\n",
4974 );
4975}
4976
// Pushed LSP diagnostics that reference each other via `relatedInformation`
// must be merged into groups: each group has one primary diagnostic plus its
// supplementary hints, all sharing a `group_id`. Verifies both the flattened
// range-ordered view (`diagnostics_in_range`) and per-group retrieval
// (`diagnostic_group`).
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Five diagnostics forming two logical groups:
    //   - "error 1" (warning) with one hint pointing back at it
    //   - "error 2" (error) with two hints; hint-to-primary links are expressed
    //     via relatedInformation entries titled "original diagnostic".
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Ingest the batch as pushed diagnostics from language server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // Flattened view: all entries ordered by range. "error 2" and its hints
    // share group 0; "error 1" and its hint share group 1. Exactly one entry
    // per group is `is_primary`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" primary plus its two hints, ordered by range.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5236
// When a worktree entry is renamed and the language server has registered
// `workspace/willRenameFiles` + `workspace/didRenameFiles` capabilities whose
// filters match the path, the rename must (1) send `willRenameFiles` first and
// apply the workspace edit the server returns, then (2) send the
// `didRenameFiles` notification after the rename completes.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters: any *.rs file and any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a .rs buffer so the fake server starts up.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename one.rs -> three.rs; it stays pending until the
    // server answers willRenameFiles below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The workspace edit the fake server will return from willRenameFiles;
    // the client is expected to apply it before completing the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit the handler actually served, so we can assert the
    // request round-trip happened.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename resolves, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5372
// End-to-end symbol rename via LSP: `prepare_rename` returns the symbol's
// range from textDocument/prepareRename, and `perform_rename` applies the
// multi-file WorkspaceEdit returned by textDocument/rename, yielding one
// transaction entry per edited buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Advertise rename support including prepareRename.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepareRename at offset 7 (inside "ONE"); the server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename ONE -> THREE; the server's WorkspaceEdit touches
    // both one.rs (declaration) and two.rs (two usages).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its undo transaction; both
    // files should now contain the new name.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5512
5513#[gpui::test]
5514async fn test_search(cx: &mut gpui::TestAppContext) {
5515 init_test(cx);
5516
5517 let fs = FakeFs::new(cx.executor());
5518 fs.insert_tree(
5519 path!("/dir"),
5520 json!({
5521 "one.rs": "const ONE: usize = 1;",
5522 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5523 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5524 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5525 }),
5526 )
5527 .await;
5528 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5529 assert_eq!(
5530 search(
5531 &project,
5532 SearchQuery::text(
5533 "TWO",
5534 false,
5535 true,
5536 false,
5537 Default::default(),
5538 Default::default(),
5539 false,
5540 None
5541 )
5542 .unwrap(),
5543 cx
5544 )
5545 .await
5546 .unwrap(),
5547 HashMap::from_iter([
5548 (path!("dir/two.rs").to_string(), vec![6..9]),
5549 (path!("dir/three.rs").to_string(), vec![37..40])
5550 ])
5551 );
5552
5553 let buffer_4 = project
5554 .update(cx, |project, cx| {
5555 project.open_local_buffer(path!("/dir/four.rs"), cx)
5556 })
5557 .await
5558 .unwrap();
5559 buffer_4.update(cx, |buffer, cx| {
5560 let text = "two::TWO";
5561 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5562 });
5563
5564 assert_eq!(
5565 search(
5566 &project,
5567 SearchQuery::text(
5568 "TWO",
5569 false,
5570 true,
5571 false,
5572 Default::default(),
5573 Default::default(),
5574 false,
5575 None,
5576 )
5577 .unwrap(),
5578 cx
5579 )
5580 .await
5581 .unwrap(),
5582 HashMap::from_iter([
5583 (path!("dir/two.rs").to_string(), vec![6..9]),
5584 (path!("dir/three.rs").to_string(), vec![37..40]),
5585 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5586 ])
5587 );
5588}
5589
5590#[gpui::test]
5591async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5592 init_test(cx);
5593
5594 let search_query = "file";
5595
5596 let fs = FakeFs::new(cx.executor());
5597 fs.insert_tree(
5598 path!("/dir"),
5599 json!({
5600 "one.rs": r#"// Rust file one"#,
5601 "one.ts": r#"// TypeScript file one"#,
5602 "two.rs": r#"// Rust file two"#,
5603 "two.ts": r#"// TypeScript file two"#,
5604 }),
5605 )
5606 .await;
5607 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5608
5609 assert!(
5610 search(
5611 &project,
5612 SearchQuery::text(
5613 search_query,
5614 false,
5615 true,
5616 false,
5617 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5618 Default::default(),
5619 false,
5620 None
5621 )
5622 .unwrap(),
5623 cx
5624 )
5625 .await
5626 .unwrap()
5627 .is_empty(),
5628 "If no inclusions match, no files should be returned"
5629 );
5630
5631 assert_eq!(
5632 search(
5633 &project,
5634 SearchQuery::text(
5635 search_query,
5636 false,
5637 true,
5638 false,
5639 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5640 Default::default(),
5641 false,
5642 None
5643 )
5644 .unwrap(),
5645 cx
5646 )
5647 .await
5648 .unwrap(),
5649 HashMap::from_iter([
5650 (path!("dir/one.rs").to_string(), vec![8..12]),
5651 (path!("dir/two.rs").to_string(), vec![8..12]),
5652 ]),
5653 "Rust only search should give only Rust files"
5654 );
5655
5656 assert_eq!(
5657 search(
5658 &project,
5659 SearchQuery::text(
5660 search_query,
5661 false,
5662 true,
5663 false,
5664 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5665 .unwrap(),
5666 Default::default(),
5667 false,
5668 None,
5669 )
5670 .unwrap(),
5671 cx
5672 )
5673 .await
5674 .unwrap(),
5675 HashMap::from_iter([
5676 (path!("dir/one.ts").to_string(), vec![14..18]),
5677 (path!("dir/two.ts").to_string(), vec![14..18]),
5678 ]),
5679 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5680 );
5681
5682 assert_eq!(
5683 search(
5684 &project,
5685 SearchQuery::text(
5686 search_query,
5687 false,
5688 true,
5689 false,
5690 PathMatcher::new(
5691 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5692 PathStyle::local()
5693 )
5694 .unwrap(),
5695 Default::default(),
5696 false,
5697 None,
5698 )
5699 .unwrap(),
5700 cx
5701 )
5702 .await
5703 .unwrap(),
5704 HashMap::from_iter([
5705 (path!("dir/two.ts").to_string(), vec![14..18]),
5706 (path!("dir/one.rs").to_string(), vec![8..12]),
5707 (path!("dir/one.ts").to_string(), vec![14..18]),
5708 (path!("dir/two.rs").to_string(), vec![8..12]),
5709 ]),
5710 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5711 );
5712}
5713
5714#[gpui::test]
5715async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5716 init_test(cx);
5717
5718 let search_query = "file";
5719
5720 let fs = FakeFs::new(cx.executor());
5721 fs.insert_tree(
5722 path!("/dir"),
5723 json!({
5724 "one.rs": r#"// Rust file one"#,
5725 "one.ts": r#"// TypeScript file one"#,
5726 "two.rs": r#"// Rust file two"#,
5727 "two.ts": r#"// TypeScript file two"#,
5728 }),
5729 )
5730 .await;
5731 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5732
5733 assert_eq!(
5734 search(
5735 &project,
5736 SearchQuery::text(
5737 search_query,
5738 false,
5739 true,
5740 false,
5741 Default::default(),
5742 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5743 false,
5744 None,
5745 )
5746 .unwrap(),
5747 cx
5748 )
5749 .await
5750 .unwrap(),
5751 HashMap::from_iter([
5752 (path!("dir/one.rs").to_string(), vec![8..12]),
5753 (path!("dir/one.ts").to_string(), vec![14..18]),
5754 (path!("dir/two.rs").to_string(), vec![8..12]),
5755 (path!("dir/two.ts").to_string(), vec![14..18]),
5756 ]),
5757 "If no exclusions match, all files should be returned"
5758 );
5759
5760 assert_eq!(
5761 search(
5762 &project,
5763 SearchQuery::text(
5764 search_query,
5765 false,
5766 true,
5767 false,
5768 Default::default(),
5769 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5770 false,
5771 None,
5772 )
5773 .unwrap(),
5774 cx
5775 )
5776 .await
5777 .unwrap(),
5778 HashMap::from_iter([
5779 (path!("dir/one.ts").to_string(), vec![14..18]),
5780 (path!("dir/two.ts").to_string(), vec![14..18]),
5781 ]),
5782 "Rust exclusion search should give only TypeScript files"
5783 );
5784
5785 assert_eq!(
5786 search(
5787 &project,
5788 SearchQuery::text(
5789 search_query,
5790 false,
5791 true,
5792 false,
5793 Default::default(),
5794 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5795 .unwrap(),
5796 false,
5797 None,
5798 )
5799 .unwrap(),
5800 cx
5801 )
5802 .await
5803 .unwrap(),
5804 HashMap::from_iter([
5805 (path!("dir/one.rs").to_string(), vec![8..12]),
5806 (path!("dir/two.rs").to_string(), vec![8..12]),
5807 ]),
5808 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5809 );
5810
5811 assert!(
5812 search(
5813 &project,
5814 SearchQuery::text(
5815 search_query,
5816 false,
5817 true,
5818 false,
5819 Default::default(),
5820 PathMatcher::new(
5821 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5822 PathStyle::local(),
5823 )
5824 .unwrap(),
5825 false,
5826 None,
5827 )
5828 .unwrap(),
5829 cx
5830 )
5831 .await
5832 .unwrap()
5833 .is_empty(),
5834 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5835 );
5836}
5837
5838#[gpui::test]
5839async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5840 init_test(cx);
5841
5842 let search_query = "file";
5843
5844 let fs = FakeFs::new(cx.executor());
5845 fs.insert_tree(
5846 path!("/dir"),
5847 json!({
5848 "one.rs": r#"// Rust file one"#,
5849 "one.ts": r#"// TypeScript file one"#,
5850 "two.rs": r#"// Rust file two"#,
5851 "two.ts": r#"// TypeScript file two"#,
5852 }),
5853 )
5854 .await;
5855
5856 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5857 let path_style = PathStyle::local();
5858 let _buffer = project.update(cx, |project, cx| {
5859 project.create_local_buffer("file", None, false, cx)
5860 });
5861
5862 assert_eq!(
5863 search(
5864 &project,
5865 SearchQuery::text(
5866 search_query,
5867 false,
5868 true,
5869 false,
5870 Default::default(),
5871 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5872 false,
5873 None,
5874 )
5875 .unwrap(),
5876 cx
5877 )
5878 .await
5879 .unwrap(),
5880 HashMap::from_iter([
5881 (path!("dir/one.rs").to_string(), vec![8..12]),
5882 (path!("dir/one.ts").to_string(), vec![14..18]),
5883 (path!("dir/two.rs").to_string(), vec![8..12]),
5884 (path!("dir/two.ts").to_string(), vec![14..18]),
5885 ]),
5886 "If no exclusions match, all files should be returned"
5887 );
5888
5889 assert_eq!(
5890 search(
5891 &project,
5892 SearchQuery::text(
5893 search_query,
5894 false,
5895 true,
5896 false,
5897 Default::default(),
5898 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5899 false,
5900 None,
5901 )
5902 .unwrap(),
5903 cx
5904 )
5905 .await
5906 .unwrap(),
5907 HashMap::from_iter([
5908 (path!("dir/one.ts").to_string(), vec![14..18]),
5909 (path!("dir/two.ts").to_string(), vec![14..18]),
5910 ]),
5911 "Rust exclusion search should give only TypeScript files"
5912 );
5913
5914 assert_eq!(
5915 search(
5916 &project,
5917 SearchQuery::text(
5918 search_query,
5919 false,
5920 true,
5921 false,
5922 Default::default(),
5923 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5924 false,
5925 None,
5926 )
5927 .unwrap(),
5928 cx
5929 )
5930 .await
5931 .unwrap(),
5932 HashMap::from_iter([
5933 (path!("dir/one.rs").to_string(), vec![8..12]),
5934 (path!("dir/two.rs").to_string(), vec![8..12]),
5935 ]),
5936 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5937 );
5938
5939 assert!(
5940 search(
5941 &project,
5942 SearchQuery::text(
5943 search_query,
5944 false,
5945 true,
5946 false,
5947 Default::default(),
5948 PathMatcher::new(
5949 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5950 PathStyle::local(),
5951 )
5952 .unwrap(),
5953 false,
5954 None,
5955 )
5956 .unwrap(),
5957 cx
5958 )
5959 .await
5960 .unwrap()
5961 .is_empty(),
5962 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5963 );
5964}
5965
5966#[gpui::test]
5967async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5968 init_test(cx);
5969
5970 let search_query = "file";
5971
5972 let fs = FakeFs::new(cx.executor());
5973 fs.insert_tree(
5974 path!("/dir"),
5975 json!({
5976 "one.rs": r#"// Rust file one"#,
5977 "one.ts": r#"// TypeScript file one"#,
5978 "two.rs": r#"// Rust file two"#,
5979 "two.ts": r#"// TypeScript file two"#,
5980 }),
5981 )
5982 .await;
5983 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5984 assert!(
5985 search(
5986 &project,
5987 SearchQuery::text(
5988 search_query,
5989 false,
5990 true,
5991 false,
5992 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5993 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5994 false,
5995 None,
5996 )
5997 .unwrap(),
5998 cx
5999 )
6000 .await
6001 .unwrap()
6002 .is_empty(),
6003 "If both no exclusions and inclusions match, exclusions should win and return nothing"
6004 );
6005
6006 assert!(
6007 search(
6008 &project,
6009 SearchQuery::text(
6010 search_query,
6011 false,
6012 true,
6013 false,
6014 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6015 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
6016 false,
6017 None,
6018 )
6019 .unwrap(),
6020 cx
6021 )
6022 .await
6023 .unwrap()
6024 .is_empty(),
6025 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
6026 );
6027
6028 assert!(
6029 search(
6030 &project,
6031 SearchQuery::text(
6032 search_query,
6033 false,
6034 true,
6035 false,
6036 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6037 .unwrap(),
6038 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6039 .unwrap(),
6040 false,
6041 None,
6042 )
6043 .unwrap(),
6044 cx
6045 )
6046 .await
6047 .unwrap()
6048 .is_empty(),
6049 "Non-matching inclusions and exclusions should not change that."
6050 );
6051
6052 assert_eq!(
6053 search(
6054 &project,
6055 SearchQuery::text(
6056 search_query,
6057 false,
6058 true,
6059 false,
6060 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6061 .unwrap(),
6062 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6063 .unwrap(),
6064 false,
6065 None,
6066 )
6067 .unwrap(),
6068 cx
6069 )
6070 .await
6071 .unwrap(),
6072 HashMap::from_iter([
6073 (path!("dir/one.ts").to_string(), vec![14..18]),
6074 (path!("dir/two.ts").to_string(), vec![14..18]),
6075 ]),
6076 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6077 );
6078}
6079
6080#[gpui::test]
6081async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6082 init_test(cx);
6083
6084 let fs = FakeFs::new(cx.executor());
6085 fs.insert_tree(
6086 path!("/worktree-a"),
6087 json!({
6088 "haystack.rs": r#"// NEEDLE"#,
6089 "haystack.ts": r#"// NEEDLE"#,
6090 }),
6091 )
6092 .await;
6093 fs.insert_tree(
6094 path!("/worktree-b"),
6095 json!({
6096 "haystack.rs": r#"// NEEDLE"#,
6097 "haystack.ts": r#"// NEEDLE"#,
6098 }),
6099 )
6100 .await;
6101
6102 let path_style = PathStyle::local();
6103 let project = Project::test(
6104 fs.clone(),
6105 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6106 cx,
6107 )
6108 .await;
6109
6110 assert_eq!(
6111 search(
6112 &project,
6113 SearchQuery::text(
6114 "NEEDLE",
6115 false,
6116 true,
6117 false,
6118 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6119 Default::default(),
6120 true,
6121 None,
6122 )
6123 .unwrap(),
6124 cx
6125 )
6126 .await
6127 .unwrap(),
6128 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6129 "should only return results from included worktree"
6130 );
6131 assert_eq!(
6132 search(
6133 &project,
6134 SearchQuery::text(
6135 "NEEDLE",
6136 false,
6137 true,
6138 false,
6139 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6140 Default::default(),
6141 true,
6142 None,
6143 )
6144 .unwrap(),
6145 cx
6146 )
6147 .await
6148 .unwrap(),
6149 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6150 "should only return results from included worktree"
6151 );
6152
6153 assert_eq!(
6154 search(
6155 &project,
6156 SearchQuery::text(
6157 "NEEDLE",
6158 false,
6159 true,
6160 false,
6161 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6162 Default::default(),
6163 false,
6164 None,
6165 )
6166 .unwrap(),
6167 cx
6168 )
6169 .await
6170 .unwrap(),
6171 HashMap::from_iter([
6172 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6173 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6174 ]),
6175 "should return results from both worktrees"
6176 );
6177}
6178
6179#[gpui::test]
6180async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6181 init_test(cx);
6182
6183 let fs = FakeFs::new(cx.background_executor.clone());
6184 fs.insert_tree(
6185 path!("/dir"),
6186 json!({
6187 ".git": {},
6188 ".gitignore": "**/target\n/node_modules\n",
6189 "target": {
6190 "index.txt": "index_key:index_value"
6191 },
6192 "node_modules": {
6193 "eslint": {
6194 "index.ts": "const eslint_key = 'eslint value'",
6195 "package.json": r#"{ "some_key": "some value" }"#,
6196 },
6197 "prettier": {
6198 "index.ts": "const prettier_key = 'prettier value'",
6199 "package.json": r#"{ "other_key": "other value" }"#,
6200 },
6201 },
6202 "package.json": r#"{ "main_key": "main value" }"#,
6203 }),
6204 )
6205 .await;
6206 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6207
6208 let query = "key";
6209 assert_eq!(
6210 search(
6211 &project,
6212 SearchQuery::text(
6213 query,
6214 false,
6215 false,
6216 false,
6217 Default::default(),
6218 Default::default(),
6219 false,
6220 None,
6221 )
6222 .unwrap(),
6223 cx
6224 )
6225 .await
6226 .unwrap(),
6227 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6228 "Only one non-ignored file should have the query"
6229 );
6230
6231 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6232 let path_style = PathStyle::local();
6233 assert_eq!(
6234 search(
6235 &project,
6236 SearchQuery::text(
6237 query,
6238 false,
6239 false,
6240 true,
6241 Default::default(),
6242 Default::default(),
6243 false,
6244 None,
6245 )
6246 .unwrap(),
6247 cx
6248 )
6249 .await
6250 .unwrap(),
6251 HashMap::from_iter([
6252 (path!("dir/package.json").to_string(), vec![8..11]),
6253 (path!("dir/target/index.txt").to_string(), vec![6..9]),
6254 (
6255 path!("dir/node_modules/prettier/package.json").to_string(),
6256 vec![9..12]
6257 ),
6258 (
6259 path!("dir/node_modules/prettier/index.ts").to_string(),
6260 vec![15..18]
6261 ),
6262 (
6263 path!("dir/node_modules/eslint/index.ts").to_string(),
6264 vec![13..16]
6265 ),
6266 (
6267 path!("dir/node_modules/eslint/package.json").to_string(),
6268 vec![8..11]
6269 ),
6270 ]),
6271 "Unrestricted search with ignored directories should find every file with the query"
6272 );
6273
6274 let files_to_include =
6275 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
6276 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
6277 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6278 assert_eq!(
6279 search(
6280 &project,
6281 SearchQuery::text(
6282 query,
6283 false,
6284 false,
6285 true,
6286 files_to_include,
6287 files_to_exclude,
6288 false,
6289 None,
6290 )
6291 .unwrap(),
6292 cx
6293 )
6294 .await
6295 .unwrap(),
6296 HashMap::from_iter([(
6297 path!("dir/node_modules/prettier/package.json").to_string(),
6298 vec![9..12]
6299 )]),
6300 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6301 );
6302}
6303
6304#[gpui::test]
6305async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6306 init_test(cx);
6307
6308 let fs = FakeFs::new(cx.executor());
6309 fs.insert_tree(
6310 path!("/dir"),
6311 json!({
6312 "one.rs": "// ПРИВЕТ? привет!",
6313 "two.rs": "// ПРИВЕТ.",
6314 "three.rs": "// привет",
6315 }),
6316 )
6317 .await;
6318 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6319 let unicode_case_sensitive_query = SearchQuery::text(
6320 "привет",
6321 false,
6322 true,
6323 false,
6324 Default::default(),
6325 Default::default(),
6326 false,
6327 None,
6328 );
6329 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6330 assert_eq!(
6331 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6332 .await
6333 .unwrap(),
6334 HashMap::from_iter([
6335 (path!("dir/one.rs").to_string(), vec![17..29]),
6336 (path!("dir/three.rs").to_string(), vec![3..15]),
6337 ])
6338 );
6339
6340 let unicode_case_insensitive_query = SearchQuery::text(
6341 "привет",
6342 false,
6343 false,
6344 false,
6345 Default::default(),
6346 Default::default(),
6347 false,
6348 None,
6349 );
6350 assert_matches!(
6351 unicode_case_insensitive_query,
6352 Ok(SearchQuery::Regex { .. })
6353 );
6354 assert_eq!(
6355 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6356 .await
6357 .unwrap(),
6358 HashMap::from_iter([
6359 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6360 (path!("dir/two.rs").to_string(), vec![3..15]),
6361 (path!("dir/three.rs").to_string(), vec![3..15]),
6362 ])
6363 );
6364
6365 assert_eq!(
6366 search(
6367 &project,
6368 SearchQuery::text(
6369 "привет.",
6370 false,
6371 false,
6372 false,
6373 Default::default(),
6374 Default::default(),
6375 false,
6376 None,
6377 )
6378 .unwrap(),
6379 cx
6380 )
6381 .await
6382 .unwrap(),
6383 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6384 );
6385}
6386
6387#[gpui::test]
6388async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6389 init_test(cx);
6390
6391 let fs = FakeFs::new(cx.executor());
6392 fs.insert_tree(
6393 "/one/two",
6394 json!({
6395 "three": {
6396 "a.txt": "",
6397 "four": {}
6398 },
6399 "c.rs": ""
6400 }),
6401 )
6402 .await;
6403
6404 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6405 project
6406 .update(cx, |project, cx| {
6407 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6408 project.create_entry((id, rel_path("b..")), true, cx)
6409 })
6410 .await
6411 .unwrap()
6412 .into_included()
6413 .unwrap();
6414
6415 assert_eq!(
6416 fs.paths(true),
6417 vec![
6418 PathBuf::from(path!("/")),
6419 PathBuf::from(path!("/one")),
6420 PathBuf::from(path!("/one/two")),
6421 PathBuf::from(path!("/one/two/c.rs")),
6422 PathBuf::from(path!("/one/two/three")),
6423 PathBuf::from(path!("/one/two/three/a.txt")),
6424 PathBuf::from(path!("/one/two/three/b..")),
6425 PathBuf::from(path!("/one/two/three/four")),
6426 ]
6427 );
6428}
6429
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Several language servers are registered for the same language. A hover
    // request must fan out to every server that advertises hover support,
    // never reach the server without that capability, and merge only the
    // non-empty responses.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Three servers with hover capabilities plus one deliberately without.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer kicks off all four fake servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each started server BEFORE issuing the
    // hover request: two servers answer with text, ESLint answers `None`,
    // and the capability-less server panics if it is ever queried.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Issue one hover request and wait until every capable server has
    // received it before inspecting the merged result.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // ESLint's `None` response is dropped; only the two textual hovers
    // remain (sorted for a deterministic comparison).
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6584
6585#[gpui::test]
6586async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6587 init_test(cx);
6588
6589 let fs = FakeFs::new(cx.executor());
6590 fs.insert_tree(
6591 path!("/dir"),
6592 json!({
6593 "a.ts": "a",
6594 }),
6595 )
6596 .await;
6597
6598 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6599
6600 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6601 language_registry.add(typescript_lang());
6602 let mut fake_language_servers = language_registry.register_fake_lsp(
6603 "TypeScript",
6604 FakeLspAdapter {
6605 capabilities: lsp::ServerCapabilities {
6606 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6607 ..lsp::ServerCapabilities::default()
6608 },
6609 ..FakeLspAdapter::default()
6610 },
6611 );
6612
6613 let (buffer, _handle) = project
6614 .update(cx, |p, cx| {
6615 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6616 })
6617 .await
6618 .unwrap();
6619 cx.executor().run_until_parked();
6620
6621 let fake_server = fake_language_servers
6622 .next()
6623 .await
6624 .expect("failed to get the language server");
6625
6626 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6627 move |_, _| async move {
6628 Ok(Some(lsp::Hover {
6629 contents: lsp::HoverContents::Array(vec![
6630 lsp::MarkedString::String("".to_string()),
6631 lsp::MarkedString::String(" ".to_string()),
6632 lsp::MarkedString::String("\n\n\n".to_string()),
6633 ]),
6634 range: None,
6635 }))
6636 },
6637 );
6638
6639 let hover_task = project.update(cx, |project, cx| {
6640 project.hover(&buffer, Point::new(0, 0), cx)
6641 });
6642 let () = request_handled
6643 .next()
6644 .await
6645 .expect("All hover requests should have been triggered");
6646 assert_eq!(
6647 Vec::<String>::new(),
6648 hover_task
6649 .await
6650 .into_iter()
6651 .flatten()
6652 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6653 .sorted()
6654 .collect::<Vec<_>>(),
6655 "Empty hover parts should be ignored"
6656 );
6657}
6658
6659#[gpui::test]
6660async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6661 init_test(cx);
6662
6663 let fs = FakeFs::new(cx.executor());
6664 fs.insert_tree(
6665 path!("/dir"),
6666 json!({
6667 "a.ts": "a",
6668 }),
6669 )
6670 .await;
6671
6672 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6673
6674 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6675 language_registry.add(typescript_lang());
6676 let mut fake_language_servers = language_registry.register_fake_lsp(
6677 "TypeScript",
6678 FakeLspAdapter {
6679 capabilities: lsp::ServerCapabilities {
6680 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6681 ..lsp::ServerCapabilities::default()
6682 },
6683 ..FakeLspAdapter::default()
6684 },
6685 );
6686
6687 let (buffer, _handle) = project
6688 .update(cx, |p, cx| {
6689 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6690 })
6691 .await
6692 .unwrap();
6693 cx.executor().run_until_parked();
6694
6695 let fake_server = fake_language_servers
6696 .next()
6697 .await
6698 .expect("failed to get the language server");
6699
6700 let mut request_handled = fake_server
6701 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6702 Ok(Some(vec![
6703 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6704 title: "organize imports".to_string(),
6705 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6706 ..lsp::CodeAction::default()
6707 }),
6708 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6709 title: "fix code".to_string(),
6710 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6711 ..lsp::CodeAction::default()
6712 }),
6713 ]))
6714 });
6715
6716 let code_actions_task = project.update(cx, |project, cx| {
6717 project.code_actions(
6718 &buffer,
6719 0..buffer.read(cx).len(),
6720 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6721 cx,
6722 )
6723 });
6724
6725 let () = request_handled
6726 .next()
6727 .await
6728 .expect("The code action request should have been triggered");
6729
6730 let code_actions = code_actions_task.await.unwrap().unwrap();
6731 assert_eq!(code_actions.len(), 1);
6732 assert_eq!(
6733 code_actions[0].lsp_action.action_kind(),
6734 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6735 );
6736}
6737
6738#[gpui::test]
6739async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6740 init_test(cx);
6741
6742 let fs = FakeFs::new(cx.executor());
6743 fs.insert_tree(
6744 path!("/dir"),
6745 json!({
6746 "a.tsx": "a",
6747 }),
6748 )
6749 .await;
6750
6751 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6752
6753 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6754 language_registry.add(tsx_lang());
6755 let language_server_names = [
6756 "TypeScriptServer",
6757 "TailwindServer",
6758 "ESLintServer",
6759 "NoActionsCapabilitiesServer",
6760 ];
6761
6762 let mut language_server_rxs = [
6763 language_registry.register_fake_lsp(
6764 "tsx",
6765 FakeLspAdapter {
6766 name: language_server_names[0],
6767 capabilities: lsp::ServerCapabilities {
6768 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6769 ..lsp::ServerCapabilities::default()
6770 },
6771 ..FakeLspAdapter::default()
6772 },
6773 ),
6774 language_registry.register_fake_lsp(
6775 "tsx",
6776 FakeLspAdapter {
6777 name: language_server_names[1],
6778 capabilities: lsp::ServerCapabilities {
6779 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6780 ..lsp::ServerCapabilities::default()
6781 },
6782 ..FakeLspAdapter::default()
6783 },
6784 ),
6785 language_registry.register_fake_lsp(
6786 "tsx",
6787 FakeLspAdapter {
6788 name: language_server_names[2],
6789 capabilities: lsp::ServerCapabilities {
6790 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6791 ..lsp::ServerCapabilities::default()
6792 },
6793 ..FakeLspAdapter::default()
6794 },
6795 ),
6796 language_registry.register_fake_lsp(
6797 "tsx",
6798 FakeLspAdapter {
6799 name: language_server_names[3],
6800 capabilities: lsp::ServerCapabilities {
6801 code_action_provider: None,
6802 ..lsp::ServerCapabilities::default()
6803 },
6804 ..FakeLspAdapter::default()
6805 },
6806 ),
6807 ];
6808
6809 let (buffer, _handle) = project
6810 .update(cx, |p, cx| {
6811 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6812 })
6813 .await
6814 .unwrap();
6815 cx.executor().run_until_parked();
6816
6817 let mut servers_with_actions_requests = HashMap::default();
6818 for i in 0..language_server_names.len() {
6819 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6820 panic!(
6821 "Failed to get language server #{i} with name {}",
6822 &language_server_names[i]
6823 )
6824 });
6825 let new_server_name = new_server.server.name();
6826
6827 assert!(
6828 !servers_with_actions_requests.contains_key(&new_server_name),
6829 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6830 );
6831 match new_server_name.0.as_ref() {
6832 "TailwindServer" | "TypeScriptServer" => {
6833 servers_with_actions_requests.insert(
6834 new_server_name.clone(),
6835 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6836 move |_, _| {
6837 let name = new_server_name.clone();
6838 async move {
6839 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6840 lsp::CodeAction {
6841 title: format!("{name} code action"),
6842 ..lsp::CodeAction::default()
6843 },
6844 )]))
6845 }
6846 },
6847 ),
6848 );
6849 }
6850 "ESLintServer" => {
6851 servers_with_actions_requests.insert(
6852 new_server_name,
6853 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6854 |_, _| async move { Ok(None) },
6855 ),
6856 );
6857 }
6858 "NoActionsCapabilitiesServer" => {
6859 let _never_handled = new_server
6860 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6861 panic!(
6862 "Should not call for code actions server with no corresponding capabilities"
6863 )
6864 });
6865 }
6866 unexpected => panic!("Unexpected server name: {unexpected}"),
6867 }
6868 }
6869
6870 let code_actions_task = project.update(cx, |project, cx| {
6871 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6872 });
6873
6874 // cx.run_until_parked();
6875 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6876 |mut code_actions_request| async move {
6877 code_actions_request
6878 .next()
6879 .await
6880 .expect("All code actions requests should have been triggered")
6881 },
6882 ))
6883 .await;
6884 assert_eq!(
6885 vec!["TailwindServer code action", "TypeScriptServer code action"],
6886 code_actions_task
6887 .await
6888 .unwrap()
6889 .unwrap()
6890 .into_iter()
6891 .map(|code_action| code_action.lsp_action.title().to_owned())
6892 .sorted()
6893 .collect::<Vec<_>>(),
6894 "Should receive code actions responses from all related servers with hover capabilities"
6895 );
6896}
6897
6898#[gpui::test]
6899async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6900 init_test(cx);
6901
6902 let fs = FakeFs::new(cx.executor());
6903 fs.insert_tree(
6904 "/dir",
6905 json!({
6906 "a.rs": "let a = 1;",
6907 "b.rs": "let b = 2;",
6908 "c.rs": "let c = 2;",
6909 }),
6910 )
6911 .await;
6912
6913 let project = Project::test(
6914 fs,
6915 [
6916 "/dir/a.rs".as_ref(),
6917 "/dir/b.rs".as_ref(),
6918 "/dir/c.rs".as_ref(),
6919 ],
6920 cx,
6921 )
6922 .await;
6923
6924 // check the initial state and get the worktrees
6925 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6926 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6927 assert_eq!(worktrees.len(), 3);
6928
6929 let worktree_a = worktrees[0].read(cx);
6930 let worktree_b = worktrees[1].read(cx);
6931 let worktree_c = worktrees[2].read(cx);
6932
6933 // check they start in the right order
6934 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6935 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6936 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6937
6938 (
6939 worktrees[0].clone(),
6940 worktrees[1].clone(),
6941 worktrees[2].clone(),
6942 )
6943 });
6944
6945 // move first worktree to after the second
6946 // [a, b, c] -> [b, a, c]
6947 project
6948 .update(cx, |project, cx| {
6949 let first = worktree_a.read(cx);
6950 let second = worktree_b.read(cx);
6951 project.move_worktree(first.id(), second.id(), cx)
6952 })
6953 .expect("moving first after second");
6954
6955 // check the state after moving
6956 project.update(cx, |project, cx| {
6957 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6958 assert_eq!(worktrees.len(), 3);
6959
6960 let first = worktrees[0].read(cx);
6961 let second = worktrees[1].read(cx);
6962 let third = worktrees[2].read(cx);
6963
6964 // check they are now in the right order
6965 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6966 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6967 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6968 });
6969
6970 // move the second worktree to before the first
6971 // [b, a, c] -> [a, b, c]
6972 project
6973 .update(cx, |project, cx| {
6974 let second = worktree_a.read(cx);
6975 let first = worktree_b.read(cx);
6976 project.move_worktree(first.id(), second.id(), cx)
6977 })
6978 .expect("moving second before first");
6979
6980 // check the state after moving
6981 project.update(cx, |project, cx| {
6982 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6983 assert_eq!(worktrees.len(), 3);
6984
6985 let first = worktrees[0].read(cx);
6986 let second = worktrees[1].read(cx);
6987 let third = worktrees[2].read(cx);
6988
6989 // check they are now in the right order
6990 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6991 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6992 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6993 });
6994
6995 // move the second worktree to after the third
6996 // [a, b, c] -> [a, c, b]
6997 project
6998 .update(cx, |project, cx| {
6999 let second = worktree_b.read(cx);
7000 let third = worktree_c.read(cx);
7001 project.move_worktree(second.id(), third.id(), cx)
7002 })
7003 .expect("moving second after third");
7004
7005 // check the state after moving
7006 project.update(cx, |project, cx| {
7007 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7008 assert_eq!(worktrees.len(), 3);
7009
7010 let first = worktrees[0].read(cx);
7011 let second = worktrees[1].read(cx);
7012 let third = worktrees[2].read(cx);
7013
7014 // check they are now in the right order
7015 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7016 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7017 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
7018 });
7019
7020 // move the third worktree to before the second
7021 // [a, c, b] -> [a, b, c]
7022 project
7023 .update(cx, |project, cx| {
7024 let third = worktree_c.read(cx);
7025 let second = worktree_b.read(cx);
7026 project.move_worktree(third.id(), second.id(), cx)
7027 })
7028 .expect("moving third before second");
7029
7030 // check the state after moving
7031 project.update(cx, |project, cx| {
7032 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7033 assert_eq!(worktrees.len(), 3);
7034
7035 let first = worktrees[0].read(cx);
7036 let second = worktrees[1].read(cx);
7037 let third = worktrees[2].read(cx);
7038
7039 // check they are now in the right order
7040 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7041 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7042 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7043 });
7044
7045 // move the first worktree to after the third
7046 // [a, b, c] -> [b, c, a]
7047 project
7048 .update(cx, |project, cx| {
7049 let first = worktree_a.read(cx);
7050 let third = worktree_c.read(cx);
7051 project.move_worktree(first.id(), third.id(), cx)
7052 })
7053 .expect("moving first after third");
7054
7055 // check the state after moving
7056 project.update(cx, |project, cx| {
7057 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7058 assert_eq!(worktrees.len(), 3);
7059
7060 let first = worktrees[0].read(cx);
7061 let second = worktrees[1].read(cx);
7062 let third = worktrees[2].read(cx);
7063
7064 // check they are now in the right order
7065 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7066 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7067 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7068 });
7069
7070 // move the third worktree to before the first
7071 // [b, c, a] -> [a, b, c]
7072 project
7073 .update(cx, |project, cx| {
7074 let third = worktree_a.read(cx);
7075 let first = worktree_b.read(cx);
7076 project.move_worktree(third.id(), first.id(), cx)
7077 })
7078 .expect("moving third before first");
7079
7080 // check the state after moving
7081 project.update(cx, |project, cx| {
7082 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7083 assert_eq!(worktrees.len(), 3);
7084
7085 let first = worktrees[0].read(cx);
7086 let second = worktrees[1].read(cx);
7087 let third = worktrees[2].read(cx);
7088
7089 // check they are now in the right order
7090 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7091 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7092 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7093 });
7094}
7095
7096#[gpui::test]
7097async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7098 init_test(cx);
7099
7100 let staged_contents = r#"
7101 fn main() {
7102 println!("hello world");
7103 }
7104 "#
7105 .unindent();
7106 let file_contents = r#"
7107 // print goodbye
7108 fn main() {
7109 println!("goodbye world");
7110 }
7111 "#
7112 .unindent();
7113
7114 let fs = FakeFs::new(cx.background_executor.clone());
7115 fs.insert_tree(
7116 "/dir",
7117 json!({
7118 ".git": {},
7119 "src": {
7120 "main.rs": file_contents,
7121 }
7122 }),
7123 )
7124 .await;
7125
7126 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7127
7128 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7129
7130 let buffer = project
7131 .update(cx, |project, cx| {
7132 project.open_local_buffer("/dir/src/main.rs", cx)
7133 })
7134 .await
7135 .unwrap();
7136 let unstaged_diff = project
7137 .update(cx, |project, cx| {
7138 project.open_unstaged_diff(buffer.clone(), cx)
7139 })
7140 .await
7141 .unwrap();
7142
7143 cx.run_until_parked();
7144 unstaged_diff.update(cx, |unstaged_diff, cx| {
7145 let snapshot = buffer.read(cx).snapshot();
7146 assert_hunks(
7147 unstaged_diff.hunks(&snapshot, cx),
7148 &snapshot,
7149 &unstaged_diff.base_text_string().unwrap(),
7150 &[
7151 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7152 (
7153 2..3,
7154 " println!(\"hello world\");\n",
7155 " println!(\"goodbye world\");\n",
7156 DiffHunkStatus::modified_none(),
7157 ),
7158 ],
7159 );
7160 });
7161
7162 let staged_contents = r#"
7163 // print goodbye
7164 fn main() {
7165 }
7166 "#
7167 .unindent();
7168
7169 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7170
7171 cx.run_until_parked();
7172 unstaged_diff.update(cx, |unstaged_diff, cx| {
7173 let snapshot = buffer.read(cx).snapshot();
7174 assert_hunks(
7175 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7176 &snapshot,
7177 &unstaged_diff.base_text().text(),
7178 &[(
7179 2..3,
7180 "",
7181 " println!(\"goodbye world\");\n",
7182 DiffHunkStatus::added_none(),
7183 )],
7184 );
7185 });
7186}
7187
// Verifies uncommitted diffs (working copy vs. HEAD): hunks update when HEAD
// changes, each hunk's secondary status reflects whether it is staged in the
// index, and a file deleted from disk shows a single deletion hunk whose
// staged-ness follows the index.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: contains the println! change but not the added comment,
    // so below, the println! hunk is staged and the comment hunk is not.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and index also contain `deletion.rs`, which is absent from the
    // working tree, so it should appear as a deleted file.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    // The added comment is not in the index: unstaged.
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    // The println! change is already in the index: staged.
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                // The deletion hunk is unstaged: the file is still in the index.
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (by writing an index without it).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                // Same hunk, but now fully staged.
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7367
// Walks through staging hunks via `stage_or_unstage_hunks`, checking the
// optimistic "pending" secondary statuses, the events emitted by the diff,
// rollback when the index write fails, and two stage operations issued
// back-to-back before either index write completes.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contents; the working copy deletes "zero" and upcases "two" and
    // "four", producing three hunks (one deletion, two modifications).
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // Index starts out identical to HEAD, so every hunk begins unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged
    // (NoSecondaryHunk).
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It still shows as pending optimistically.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations, without waiting for the
    // first index write to land.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect; all three hunks end up staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7707
// Like `test_staging_hunks`, but with FS events paused so that new stage
// operations are issued while notifications for earlier index writes are
// still in flight; all hunks must still converge to staged.
// NOTE(review): the pinned seeds presumably reproduce a past scheduling
// failure — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD contents; the working copy deletes "zero" and upcases "two" and
    // "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and index start out identical, so all hunks begin unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so index writes complete without the corresponding
    // change notifications being delivered.
    fs.pause_events();

    // Stage the first hunk. It shows as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO, then deliver every buffered event.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7901
// Randomized test: repeatedly stages/unstages random hunks with random yields
// in between, tracking the expected secondary status in a local model, then
// checks that the diff's hunks match the model once all index writes settle.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the OPERATIONS env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD == index; the buffer modifies every 5th line, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected per-hunk secondary statuses.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk and record the optimistic pending status we expect.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to shuffle task interleavings.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status resolves to its final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(RepoPath::from_rel_path(rel_path("file.txt")))
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
8024
8025#[gpui::test]
8026async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
8027 init_test(cx);
8028
8029 let committed_contents = r#"
8030 fn main() {
8031 println!("hello from HEAD");
8032 }
8033 "#
8034 .unindent();
8035 let file_contents = r#"
8036 fn main() {
8037 println!("hello from the working copy");
8038 }
8039 "#
8040 .unindent();
8041
8042 let fs = FakeFs::new(cx.background_executor.clone());
8043 fs.insert_tree(
8044 "/dir",
8045 json!({
8046 ".git": {},
8047 "src": {
8048 "main.rs": file_contents,
8049 }
8050 }),
8051 )
8052 .await;
8053
8054 fs.set_head_for_repo(
8055 Path::new("/dir/.git"),
8056 &[("src/main.rs", committed_contents.clone())],
8057 "deadbeef",
8058 );
8059 fs.set_index_for_repo(
8060 Path::new("/dir/.git"),
8061 &[("src/main.rs", committed_contents.clone())],
8062 );
8063
8064 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8065
8066 let buffer = project
8067 .update(cx, |project, cx| {
8068 project.open_local_buffer("/dir/src/main.rs", cx)
8069 })
8070 .await
8071 .unwrap();
8072 let uncommitted_diff = project
8073 .update(cx, |project, cx| {
8074 project.open_uncommitted_diff(buffer.clone(), cx)
8075 })
8076 .await
8077 .unwrap();
8078
8079 cx.run_until_parked();
8080 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8081 let snapshot = buffer.read(cx).snapshot();
8082 assert_hunks(
8083 uncommitted_diff.hunks(&snapshot, cx),
8084 &snapshot,
8085 &uncommitted_diff.base_text_string().unwrap(),
8086 &[(
8087 1..2,
8088 " println!(\"hello from HEAD\");\n",
8089 " println!(\"hello from the working copy\");\n",
8090 DiffHunkStatus {
8091 kind: DiffHunkStatusKind::Modified,
8092 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8093 },
8094 )],
8095 );
8096 });
8097}
8098
// Verifies that `GitStore::repository_and_path_for_project_path` resolves a
// project path to the innermost containing repository and repo-relative path
// (or `None` outside any repository), and that the mapping is invalidated
// when a repository's `.git` directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Two nested repositories (/root/dir1 and /root/dir1/deps/dep1);
    // `c.txt` lives outside of both.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for the initial git scan so all repositories are known.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (work directory, repo-relative path)).
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's `.git` directory should drop it from
    // the git store, so paths inside it no longer resolve to a repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8188
8189#[gpui::test]
8190async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8191 init_test(cx);
8192 let fs = FakeFs::new(cx.background_executor.clone());
8193 let home = paths::home_dir();
8194 fs.insert_tree(
8195 home,
8196 json!({
8197 ".git": {},
8198 "project": {
8199 "a.txt": "A"
8200 },
8201 }),
8202 )
8203 .await;
8204
8205 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8206 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8207 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8208
8209 project
8210 .update(cx, |project, cx| project.git_scans_complete(cx))
8211 .await;
8212 tree.flush_fs_events(cx).await;
8213
8214 project.read_with(cx, |project, cx| {
8215 let containing = project
8216 .git_store()
8217 .read(cx)
8218 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8219 assert!(containing.is_none());
8220 });
8221
8222 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8223 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8224 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8225 project
8226 .update(cx, |project, cx| project.git_scans_complete(cx))
8227 .await;
8228 tree.flush_fs_events(cx).await;
8229
8230 project.read_with(cx, |project, cx| {
8231 let containing = project
8232 .git_store()
8233 .read(cx)
8234 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8235 assert_eq!(
8236 containing
8237 .unwrap()
8238 .0
8239 .read(cx)
8240 .work_directory_abs_path
8241 .as_ref(),
8242 home,
8243 );
8244 });
8245}
8246
// End-to-end check against a real git repository on disk: the repository's
// cached status must track modifications, additions, deletions, and commits
// as the working tree changes.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem and real git are used below, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce a deletion and a modification relative to the commit.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously unchanged tracked file; it should gain a status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the removal of d.txt from the index.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file and one untracked file from the working tree.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8376
// Checks two status-postprocessing behaviors: a file deleted in the index
// but present in HEAD and the working copy reports a combined "DA" status,
// and a nested repository's work directory is excluded from the outer
// repository's statuses.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem and real git are used below, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both repositories are discovered; select the outer one ("project").
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8439
8440#[track_caller]
8441/// We merge lhs into rhs.
8442fn merge_pending_ops_snapshots(
8443 source: Vec<pending_op::PendingOps>,
8444 mut target: Vec<pending_op::PendingOps>,
8445) -> Vec<pending_op::PendingOps> {
8446 for s_ops in source {
8447 if let Some(idx) = target.iter().zip(0..).find_map(|(ops, idx)| {
8448 if ops.repo_path == s_ops.repo_path {
8449 Some(idx)
8450 } else {
8451 None
8452 }
8453 }) {
8454 let t_ops = &mut target[idx];
8455 for s_op in s_ops.ops {
8456 if let Some(op_idx) = t_ops
8457 .ops
8458 .iter()
8459 .zip(0..)
8460 .find_map(|(op, idx)| if op.id == s_op.id { Some(idx) } else { None })
8461 {
8462 let t_op = &mut t_ops.ops[op_idx];
8463 match (s_op.job_status, t_op.job_status) {
8464 (pending_op::JobStatus::Running, _) => {}
8465 (s_st, pending_op::JobStatus::Running) => t_op.job_status = s_st,
8466 (s_st, t_st) if s_st == t_st => {}
8467 _ => unreachable!(),
8468 }
8469 } else {
8470 t_ops.ops.push(s_op);
8471 }
8472 }
8473 t_ops.ops.sort_by(|l, r| l.id.cmp(&r.id));
8474 } else {
8475 target.push(s_ops);
8476 }
8477 }
8478 target
8479}
8480
// Verifies that staging/unstaging a single entry records a pending op that
// transitions from `Running` (while the task is in flight) to `Finished`,
// that sequential ops receive monotonically increasing ids, and that the
// final cached status reflects the last operation.
#[gpui::test]
async fn test_repository_pending_ops_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every `PendingOpsChanged` event into one merged snapshot so the
    // complete op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure we have no pending ops for any of the untracked files
    repo.read_with(cx, |repo, _cx| {
        assert!(repo.pending_ops_by_path.is_empty());
    });

    // Expected id of the next pending op; bumped once per operation.
    let mut id = 1u16;

    // Stages (or unstages) `path`, asserting the newest op is `Running`
    // while the task is in flight and `Finished` once it resolves.
    let mut assert_stage = async |path: RepoPath, stage| {
        let git_status = if stage {
            pending_op::GitStatus::Staged
        } else {
            pending_op::GitStatus::Unstaged
        };
        repo.update(cx, |repo, cx| {
            let task = if stage {
                repo.stage_entries(vec![path.clone()], cx)
            } else {
                repo.unstage_entries(vec![path.clone()], cx)
            };
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Running
                })
            );
            task
        })
        .await
        .unwrap();

        repo.read_with(cx, |repo, _cx| {
            let ops = repo.pending_ops_for_path(&path).unwrap();
            assert_eq!(
                ops.ops.last(),
                Some(&pending_op::PendingOp {
                    id: id.into(),
                    git_status,
                    job_status: pending_op::JobStatus::Finished
                })
            );
        });

        id += 1;
    };

    // Alternate staging and unstaging the same entry, ending staged.
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;
    assert_stage(repo_path("a.txt"), false).await;
    assert_stage(repo_path("a.txt"), true).await;

    cx.run_until_parked();

    // The merged event history holds all five ops, in id order, all finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 3u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 4u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 5u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The last op staged the file, so it reads as added and unmodified.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8641
// Issues two overlapping stage requests for the same entry: the first
// (detached) request's op ends up `Skipped`, while the second request runs
// to completion and its op ends up `Finished`.
#[gpui::test]
async fn test_repository_pending_ops_long_running_staging(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[("a.txt", FileStatus::Untracked)],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every `PendingOpsChanged` event into one merged snapshot so the
    // complete op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // First stage request: detached, never awaited directly.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .detach();

    // Second stage request for the same path, awaited with a timeout so a
    // hang fails the test instead of wedging it.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .unwrap()
    .with_timeout(Duration::from_secs(1), &cx.executor())
    .await
    .unwrap();

    cx.run_until_parked();

    // Op 1 was skipped in favor of op 2, which finished.
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Skipped
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            }
        ],
    );

    // The file ended up staged: added in the index, unmodified on disk.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Added,
                    worktree_status: StatusCode::Unmodified
                }
                .into(),
            }]
        );
    });
}
8747
// Covers `stage_all`/`unstage_all` across multiple entries: each file's
// recorded history is a `Staged` op followed by an `Unstaged` op (both
// finished), and since the final operation unstaged everything, both files
// end up untracked again.
#[gpui::test]
async fn test_repository_pending_ops_stage_all(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "b.txt": "b"
            }

        }),
    )
    .await;

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[
            ("a.txt", FileStatus::Untracked),
            ("b.txt", FileStatus::Untracked),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/my-repo").as_ref()], cx).await;
    // Fold every `PendingOpsChanged` event into one merged snapshot so the
    // complete op history can be asserted at the end.
    let pending_ops_all = Arc::new(Mutex::new(SumTree::default()));
    project.update(cx, |project, cx| {
        let pending_ops_all = pending_ops_all.clone();
        cx.subscribe(project.git_store(), move |_, _, e, _| {
            if let GitStoreEvent::RepositoryUpdated(
                _,
                RepositoryEvent::PendingOpsChanged { pending_ops },
                _,
            ) = e
            {
                let merged = merge_pending_ops_snapshots(
                    pending_ops.items(()),
                    pending_ops_all.lock().items(()),
                );
                *pending_ops_all.lock() = SumTree::from_iter(merged.into_iter(), ());
            }
        })
        .detach();
    });
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repo = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Stage one entry individually, then stage and unstage everything.
    repo.update(cx, |repo, cx| {
        repo.stage_entries(vec![repo_path("a.txt")], cx)
    })
    .await
    .unwrap();
    repo.update(cx, |repo, cx| repo.stage_all(cx))
        .await
        .unwrap();
    repo.update(cx, |repo, cx| repo.unstage_all(cx))
        .await
        .unwrap();

    cx.run_until_parked();

    // a.txt: staged (op 1), then unstaged by unstage_all (op 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("a.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );
    // b.txt: staged by stage_all (op 1), then unstaged (op 2).
    assert_eq!(
        pending_ops_all
            .lock()
            .get(&worktree::PathKey(repo_path("b.txt").as_ref().clone()), ())
            .unwrap()
            .ops,
        vec![
            pending_op::PendingOp {
                id: 1u16.into(),
                git_status: pending_op::GitStatus::Staged,
                job_status: pending_op::JobStatus::Finished
            },
            pending_op::PendingOp {
                id: 2u16.into(),
                git_status: pending_op::GitStatus::Unstaged,
                job_status: pending_op::JobStatus::Finished
            },
        ],
    );

    // Everything was unstaged at the end, so both files are untracked again.
    repo.update(cx, |repo, _cx| {
        let git_statuses = repo.cached_status().collect::<Vec<_>>();

        assert_eq!(
            git_statuses,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
            ]
        );
    });
}
8876
// Opens a worktree rooted in a subfolder of a repository and verifies that
// the repository whose work directory lies *above* the worktree root is
// still discovered, that statuses for paths inside the worktree are
// reported, and that they clear when the repository's status changes.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths of the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository's work directory.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear the repository's status and verify both paths report none.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8956
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// `cfg(any())` with no predicates is always false, so this test is
// currently compiled out entirely.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem and real git are used below, so blocking is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a commit on a side branch, then a conflicting commit on main,
    // and cherry-pick the side branch's commit to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick is resolved, the conflict set should be empty.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
9039
// Verifies that rewriting .gitignore is picked up: a newly ignored file
// loses its status, and a newly un-ignored file shows up as added once it
// is written into the index.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // Commit and stage the gitignore and the xml file; b.txt stays ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    // Now the ignore states are swapped, and b.txt reads as newly added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
9107
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// Verifies that renaming a repository's work directory updates
// `work_directory_abs_path` while preserving the cached file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real filesystem and real git are used below, so blocking is allowed.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // Commit `a`, then modify it; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work directory is project1, a modified, b untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename, and the statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
9189
9190// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
9191// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
9193// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
9194// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
// Exercises the full lifecycle of git file statuses against a real repository:
// untracked files, worktree modifications, commits, resets, stashes, ignore
// rules, deletions, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // Committed files report no status at all.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files and extend the ignore rules, then commit the new .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Create a file deep inside a fresh directory tree; it should show as untracked.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the top-level directory; the status must be tracked at the new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&RepoPath::from_rel_path(
                    &rel_path(renamed_dir_name).join(rel_path(RENAMED_FILE))
                ))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
9414
9415#[gpui::test]
9416#[ignore]
9417async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
9418 init_test(cx);
9419 cx.executor().allow_parking();
9420
9421 const IGNORE_RULE: &str = "**/target";
9422
9423 let root = TempTree::new(json!({
9424 "project": {
9425 "src": {
9426 "main.rs": "fn main() {}"
9427 },
9428 "target": {
9429 "debug": {
9430 "important_text.txt": "important text",
9431 },
9432 },
9433 ".gitignore": IGNORE_RULE
9434 },
9435
9436 }));
9437 let root_path = root.path();
9438
9439 // Set up git repository before creating the worktree.
9440 let work_dir = root.path().join("project");
9441 let repo = git_init(work_dir.as_path());
9442 repo.add_ignore_rule(IGNORE_RULE).unwrap();
9443 git_add("src/main.rs", &repo);
9444 git_add(".gitignore", &repo);
9445 git_commit("Initial commit", &repo);
9446
9447 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
9448 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9449 let project_events = Arc::new(Mutex::new(Vec::new()));
9450 project.update(cx, |project, cx| {
9451 let repo_events = repository_updates.clone();
9452 cx.subscribe(project.git_store(), move |_, _, e, _| {
9453 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9454 repo_events.lock().push(e.clone());
9455 }
9456 })
9457 .detach();
9458 let project_events = project_events.clone();
9459 cx.subscribe_self(move |_, e, _| {
9460 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9461 project_events.lock().extend(
9462 updates
9463 .iter()
9464 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9465 .filter(|(path, _)| path != "fs-event-sentinel"),
9466 );
9467 }
9468 })
9469 .detach();
9470 });
9471
9472 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9473 tree.flush_fs_events(cx).await;
9474 tree.update(cx, |tree, cx| {
9475 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
9476 })
9477 .await
9478 .unwrap();
9479 tree.update(cx, |tree, _| {
9480 assert_eq!(
9481 tree.entries(true, 0)
9482 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9483 .collect::<Vec<_>>(),
9484 vec![
9485 (rel_path(""), false),
9486 (rel_path("project/"), false),
9487 (rel_path("project/.gitignore"), false),
9488 (rel_path("project/src"), false),
9489 (rel_path("project/src/main.rs"), false),
9490 (rel_path("project/target"), true),
9491 (rel_path("project/target/debug"), true),
9492 (rel_path("project/target/debug/important_text.txt"), true),
9493 ]
9494 );
9495 });
9496
9497 assert_eq!(
9498 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9499 vec![
9500 RepositoryEvent::StatusesChanged { full_scan: true },
9501 RepositoryEvent::MergeHeadsChanged,
9502 ],
9503 "Initial worktree scan should produce a repo update event"
9504 );
9505 assert_eq!(
9506 project_events.lock().drain(..).collect::<Vec<_>>(),
9507 vec![
9508 ("project/target".to_string(), PathChange::Loaded),
9509 ("project/target/debug".to_string(), PathChange::Loaded),
9510 (
9511 "project/target/debug/important_text.txt".to_string(),
9512 PathChange::Loaded
9513 ),
9514 ],
9515 "Initial project changes should show that all not-ignored and all opened files are loaded"
9516 );
9517
9518 let deps_dir = work_dir.join("target").join("debug").join("deps");
9519 std::fs::create_dir_all(&deps_dir).unwrap();
9520 tree.flush_fs_events(cx).await;
9521 project
9522 .update(cx, |project, cx| project.git_scans_complete(cx))
9523 .await;
9524 cx.executor().run_until_parked();
9525 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9526 tree.flush_fs_events(cx).await;
9527 project
9528 .update(cx, |project, cx| project.git_scans_complete(cx))
9529 .await;
9530 cx.executor().run_until_parked();
9531 std::fs::remove_dir_all(&deps_dir).unwrap();
9532 tree.flush_fs_events(cx).await;
9533 project
9534 .update(cx, |project, cx| project.git_scans_complete(cx))
9535 .await;
9536 cx.executor().run_until_parked();
9537
9538 tree.update(cx, |tree, _| {
9539 assert_eq!(
9540 tree.entries(true, 0)
9541 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9542 .collect::<Vec<_>>(),
9543 vec![
9544 (rel_path(""), false),
9545 (rel_path("project/"), false),
9546 (rel_path("project/.gitignore"), false),
9547 (rel_path("project/src"), false),
9548 (rel_path("project/src/main.rs"), false),
9549 (rel_path("project/target"), true),
9550 (rel_path("project/target/debug"), true),
9551 (rel_path("project/target/debug/important_text.txt"), true),
9552 ],
9553 "No stray temp files should be left after the flycheck changes"
9554 );
9555 });
9556
9557 assert_eq!(
9558 repository_updates
9559 .lock()
9560 .iter()
9561 .cloned()
9562 .collect::<Vec<_>>(),
9563 Vec::new(),
9564 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9565 );
9566 assert_eq!(
9567 project_events.lock().as_slice(),
9568 vec![
9569 ("project/target/debug/deps".to_string(), PathChange::Added),
9570 ("project/target/debug/deps".to_string(), PathChange::Removed),
9571 ],
9572 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9573 No updates for more nested directories should happen as those are ignored",
9574 );
9575}
9576
9577#[gpui::test]
9578async fn test_odd_events_for_ignored_dirs(
9579 executor: BackgroundExecutor,
9580 cx: &mut gpui::TestAppContext,
9581) {
9582 init_test(cx);
9583 let fs = FakeFs::new(executor);
9584 fs.insert_tree(
9585 path!("/root"),
9586 json!({
9587 ".git": {},
9588 ".gitignore": "**/target/",
9589 "src": {
9590 "main.rs": "fn main() {}",
9591 },
9592 "target": {
9593 "debug": {
9594 "foo.txt": "foo",
9595 "deps": {}
9596 }
9597 }
9598 }),
9599 )
9600 .await;
9601 fs.set_head_and_index_for_repo(
9602 path!("/root/.git").as_ref(),
9603 &[
9604 (".gitignore", "**/target/".into()),
9605 ("src/main.rs", "fn main() {}".into()),
9606 ],
9607 );
9608
9609 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9610 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9611 let project_events = Arc::new(Mutex::new(Vec::new()));
9612 project.update(cx, |project, cx| {
9613 let repository_updates = repository_updates.clone();
9614 cx.subscribe(project.git_store(), move |_, _, e, _| {
9615 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9616 repository_updates.lock().push(e.clone());
9617 }
9618 })
9619 .detach();
9620 let project_events = project_events.clone();
9621 cx.subscribe_self(move |_, e, _| {
9622 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9623 project_events.lock().extend(
9624 updates
9625 .iter()
9626 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9627 .filter(|(path, _)| path != "fs-event-sentinel"),
9628 );
9629 }
9630 })
9631 .detach();
9632 });
9633
9634 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9635 tree.update(cx, |tree, cx| {
9636 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9637 })
9638 .await
9639 .unwrap();
9640 tree.flush_fs_events(cx).await;
9641 project
9642 .update(cx, |project, cx| project.git_scans_complete(cx))
9643 .await;
9644 cx.run_until_parked();
9645 tree.update(cx, |tree, _| {
9646 assert_eq!(
9647 tree.entries(true, 0)
9648 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9649 .collect::<Vec<_>>(),
9650 vec![
9651 (rel_path(""), false),
9652 (rel_path(".gitignore"), false),
9653 (rel_path("src"), false),
9654 (rel_path("src/main.rs"), false),
9655 (rel_path("target"), true),
9656 (rel_path("target/debug"), true),
9657 (rel_path("target/debug/deps"), true),
9658 (rel_path("target/debug/foo.txt"), true),
9659 ]
9660 );
9661 });
9662
9663 assert_eq!(
9664 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9665 vec![
9666 RepositoryEvent::MergeHeadsChanged,
9667 RepositoryEvent::BranchChanged,
9668 RepositoryEvent::StatusesChanged { full_scan: false },
9669 RepositoryEvent::StatusesChanged { full_scan: false },
9670 ],
9671 "Initial worktree scan should produce a repo update event"
9672 );
9673 assert_eq!(
9674 project_events.lock().drain(..).collect::<Vec<_>>(),
9675 vec![
9676 ("target".to_string(), PathChange::Loaded),
9677 ("target/debug".to_string(), PathChange::Loaded),
9678 ("target/debug/deps".to_string(), PathChange::Loaded),
9679 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9680 ],
9681 "All non-ignored entries and all opened firs should be getting a project event",
9682 );
9683
9684 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9685 // This may happen multiple times during a single flycheck, but once is enough for testing.
9686 fs.emit_fs_event("/root/target/debug/deps", None);
9687 tree.flush_fs_events(cx).await;
9688 project
9689 .update(cx, |project, cx| project.git_scans_complete(cx))
9690 .await;
9691 cx.executor().run_until_parked();
9692
9693 assert_eq!(
9694 repository_updates
9695 .lock()
9696 .iter()
9697 .cloned()
9698 .collect::<Vec<_>>(),
9699 Vec::new(),
9700 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9701 );
9702 assert_eq!(
9703 project_events.lock().as_slice(),
9704 Vec::new(),
9705 "No further project events should happen, as only ignored dirs received FS events",
9706 );
9707}
9708
// Ensures repositories are only surfaced for visible worktrees: adding an
// invisible (single-file) worktree inside an enclosing repo must not make
// that repo appear in `Project::repositories`.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repo of the visible worktree is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add a non-visible worktree pointing at a file inside the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer repo (/root/dir1) must still not be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9770
// Verifies git status and ignore handling when new files appear in tracked,
// ancestor-ignored, and ignored directories after the initial scan, with file
// scan exclusions disabled so ignored entries are actually visible.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Disable file scan exclusions so the `.git` and ignored dirs show up in the worktree.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored dir's entries to be loaded so their state can be asserted.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: clean tracked file, and ignored entries with no git status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files in each of the three locations; stage only the tracked one.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git dir itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9911
// Ensures linked git worktrees (a `.git` file whose `gitdir:` points into
// `<repo>/.git/worktrees/...`) and submodules (`gitdir:` into
// `<repo>/.git/modules/...`) are each detected as separate repositories, and
// that both receive status updates when their git state changes.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repos (main, linked worktree, submodule) should be discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Wait for pending repository work to drain before asserting status.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
10067
10068#[gpui::test]
10069async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
10070 init_test(cx);
10071 let fs = FakeFs::new(cx.background_executor.clone());
10072 fs.insert_tree(
10073 path!("/root"),
10074 json!({
10075 "project": {
10076 ".git": {},
10077 "child1": {
10078 "a.txt": "A",
10079 },
10080 "child2": {
10081 "b.txt": "B",
10082 }
10083 }
10084 }),
10085 )
10086 .await;
10087
10088 let project = Project::test(
10089 fs.clone(),
10090 [
10091 path!("/root/project/child1").as_ref(),
10092 path!("/root/project/child2").as_ref(),
10093 ],
10094 cx,
10095 )
10096 .await;
10097
10098 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
10099 tree.flush_fs_events(cx).await;
10100 project
10101 .update(cx, |project, cx| project.git_scans_complete(cx))
10102 .await;
10103 cx.executor().run_until_parked();
10104
10105 let repos = project.read_with(cx, |project, cx| {
10106 project
10107 .repositories(cx)
10108 .values()
10109 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
10110 .collect::<Vec<_>>()
10111 });
10112 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
10113}
10114
// Saving a buffer under a new path should retarget its unstaged and
// uncommitted diffs to the new file's index and HEAD contents, driven by the
// `BufferChangedFilePath` event.
#[gpui::test]
async fn test_buffer_changed_file_path_updates_git_diff(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Distinct contents for HEAD, index, and buffer so each diff base is identifiable.
    let file_1_committed = String::from(r#"file_1_committed"#);
    let file_1_staged = String::from(r#"file_1_staged"#);
    let file_2_committed = String::from(r#"file_2_committed"#);
    let file_2_staged = String::from(r#"file_2_staged"#);
    let buffer_contents = String::from(r#"buffer"#);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "src": {
                "file_1.rs": file_1_committed.clone(),
                "file_2.rs": file_2_committed.clone(),
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_committed.clone()),
            ("src/file_2.rs", file_2_committed.clone()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[
            ("src/file_1.rs", file_1_staged.clone()),
            ("src/file_2.rs", file_2_staged.clone()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/src/file_1.rs"), cx)
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..buffer.len(), buffer_contents.as_str())], None, cx);
    });

    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    unstaged_diff.update(cx, |unstaged_diff, _cx| {
        let base_text = unstaged_diff.base_text_string().unwrap();
        assert_eq!(base_text, file_1_staged, "Should start with file_1 staged");
    });

    // Save the buffer as `file_2.rs`, which should trigger the
    // `BufferChangedFilePath` event.
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("src/file_2.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    // Verify that the diff bases have been updated to file_2's contents due to
    // the `BufferChangedFilePath` event being handled.
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        let base_text = unstaged_diff.base_text_string().unwrap();
        assert_eq!(
            base_text, file_2_staged,
            "Diff bases should be automatically updated to file_2 staged content"
        );

        let hunks: Vec<_> = unstaged_diff.hunks(&snapshot, cx).collect();
        assert!(!hunks.is_empty(), "Should have diff hunks for file_2");
    });

    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    cx.run_until_parked();

    uncommitted_diff.update(cx, |uncommitted_diff, _cx| {
        let base_text = uncommitted_diff.base_text_string().unwrap();
        assert_eq!(
            base_text, file_2_committed,
            "Uncommitted diff should compare against file_2 committed content"
        );
    });
}
10228
10229async fn search(
10230 project: &Entity<Project>,
10231 query: SearchQuery,
10232 cx: &mut gpui::TestAppContext,
10233) -> Result<HashMap<String, Vec<Range<usize>>>> {
10234 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
10235 let mut results = HashMap::default();
10236 while let Ok(search_result) = search_rx.recv().await {
10237 match search_result {
10238 SearchResult::Buffer { buffer, ranges } => {
10239 results.entry(buffer).or_insert(ranges);
10240 }
10241 SearchResult::LimitReached => {}
10242 }
10243 }
10244 Ok(results
10245 .into_iter()
10246 .map(|(buffer, ranges)| {
10247 buffer.update(cx, |buffer, cx| {
10248 let path = buffer
10249 .file()
10250 .unwrap()
10251 .full_path(cx)
10252 .to_string_lossy()
10253 .to_string();
10254 let ranges = ranges
10255 .into_iter()
10256 .map(|range| range.to_offset(buffer))
10257 .collect::<Vec<_>>();
10258 (path, ranges)
10259 })
10260 })
10261 .collect())
10262}
10263
10264pub fn init_test(cx: &mut gpui::TestAppContext) {
10265 zlog::init_test();
10266
10267 cx.update(|cx| {
10268 let settings_store = SettingsStore::test(cx);
10269 cx.set_global(settings_store);
10270 release_channel::init(SemanticVersion::default(), cx);
10271 });
10272}
10273
10274fn json_lang() -> Arc<Language> {
10275 Arc::new(Language::new(
10276 LanguageConfig {
10277 name: "JSON".into(),
10278 matcher: LanguageMatcher {
10279 path_suffixes: vec!["json".to_string()],
10280 ..Default::default()
10281 },
10282 ..Default::default()
10283 },
10284 None,
10285 ))
10286}
10287
10288fn js_lang() -> Arc<Language> {
10289 Arc::new(Language::new(
10290 LanguageConfig {
10291 name: "JavaScript".into(),
10292 matcher: LanguageMatcher {
10293 path_suffixes: vec!["js".to_string()],
10294 ..Default::default()
10295 },
10296 ..Default::default()
10297 },
10298 None,
10299 ))
10300}
10301
10302fn rust_lang() -> Arc<Language> {
10303 Arc::new(Language::new(
10304 LanguageConfig {
10305 name: "Rust".into(),
10306 matcher: LanguageMatcher {
10307 path_suffixes: vec!["rs".to_string()],
10308 ..Default::default()
10309 },
10310 ..Default::default()
10311 },
10312 Some(tree_sitter_rust::LANGUAGE.into()),
10313 ))
10314}
10315
/// A fake Python language for toolchain tests: its lister reports a
/// "Python Venv" toolchain for every ancestor of the queried path that
/// contains a `.venv` directory on the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unimplemented; tests only exercise `list`.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchain.
        fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &gpui::App) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
10385
10386fn typescript_lang() -> Arc<Language> {
10387 Arc::new(Language::new(
10388 LanguageConfig {
10389 name: "TypeScript".into(),
10390 matcher: LanguageMatcher {
10391 path_suffixes: vec!["ts".to_string()],
10392 ..Default::default()
10393 },
10394 ..Default::default()
10395 },
10396 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
10397 ))
10398}
10399
10400fn tsx_lang() -> Arc<Language> {
10401 Arc::new(Language::new(
10402 LanguageConfig {
10403 name: "tsx".into(),
10404 matcher: LanguageMatcher {
10405 path_suffixes: vec!["tsx".to_string()],
10406 ..Default::default()
10407 },
10408 ..Default::default()
10409 },
10410 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
10411 ))
10412}
10413
10414fn get_all_tasks(
10415 project: &Entity<Project>,
10416 task_contexts: Arc<TaskContexts>,
10417 cx: &mut App,
10418) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
10419 let new_tasks = project.update(cx, |project, cx| {
10420 project.task_store.update(cx, |task_store, cx| {
10421 task_store.task_inventory().unwrap().update(cx, |this, cx| {
10422 this.used_and_current_resolved_tasks(task_contexts, cx)
10423 })
10424 })
10425 });
10426
10427 cx.background_spawn(async move {
10428 let (mut old, new) = new_tasks.await;
10429 old.extend(new);
10430 old
10431 })
10432}
10433
10434#[track_caller]
10435fn assert_entry_git_state(
10436 tree: &Worktree,
10437 repository: &Repository,
10438 path: &str,
10439 index_status: Option<StatusCode>,
10440 is_ignored: bool,
10441) {
10442 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
10443 let entry = tree
10444 .entry_for_path(&rel_path(path))
10445 .unwrap_or_else(|| panic!("entry {path} not found"));
10446 let status = repository
10447 .status_for_path(&repo_path(path))
10448 .map(|entry| entry.status);
10449 let expected = index_status.map(|index_status| {
10450 TrackedStatus {
10451 index_status,
10452 worktree_status: StatusCode::Unmodified,
10453 }
10454 .into()
10455 });
10456 assert_eq!(
10457 status, expected,
10458 "expected {path} to have git status: {expected:?}"
10459 );
10460 assert_eq!(
10461 entry.is_ignored, is_ignored,
10462 "expected {path} to have is_ignored: {is_ignored}"
10463 );
10464}
10465
10466#[track_caller]
10467fn git_init(path: &Path) -> git2::Repository {
10468 let mut init_opts = RepositoryInitOptions::new();
10469 init_opts.initial_head("main");
10470 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
10471}
10472
10473#[track_caller]
10474fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
10475 let path = path.as_ref();
10476 let mut index = repo.index().expect("Failed to get index");
10477 index.add_path(path).expect("Failed to add file");
10478 index.write().expect("Failed to write index");
10479}
10480
10481#[track_caller]
10482fn git_remove_index(path: &Path, repo: &git2::Repository) {
10483 let mut index = repo.index().expect("Failed to get index");
10484 index.remove_path(path).expect("Failed to add file");
10485 index.write().expect("Failed to write index");
10486}
10487
10488#[track_caller]
10489fn git_commit(msg: &'static str, repo: &git2::Repository) {
10490 use git2::Signature;
10491
10492 let signature = Signature::now("test", "test@zed.dev").unwrap();
10493 let oid = repo.index().unwrap().write_tree().unwrap();
10494 let tree = repo.find_tree(oid).unwrap();
10495 if let Ok(head) = repo.head() {
10496 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
10497
10498 let parent_commit = parent_obj.as_commit().unwrap();
10499
10500 repo.commit(
10501 Some("HEAD"),
10502 &signature,
10503 &signature,
10504 msg,
10505 &tree,
10506 &[parent_commit],
10507 )
10508 .expect("Failed to commit with parent");
10509 } else {
10510 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
10511 .expect("Failed to commit");
10512 }
10513}
10514
/// Cherry-picks `commit` onto the current HEAD, panicking on failure.
// NOTE: `#[cfg(any())]` never evaluates to true, so this helper is currently
// compiled out; it is kept for tests that may be re-enabled later.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
10520
10521#[track_caller]
10522fn git_stash(repo: &mut git2::Repository) {
10523 use git2::Signature;
10524
10525 let signature = Signature::now("test", "test@zed.dev").unwrap();
10526 repo.stash_save(&signature, "N/A", None)
10527 .expect("Failed to stash");
10528}
10529
10530#[track_caller]
10531fn git_reset(offset: usize, repo: &git2::Repository) {
10532 let head = repo.head().expect("Couldn't get repo head");
10533 let object = head.peel(git2::ObjectType::Commit).unwrap();
10534 let commit = object.as_commit().unwrap();
10535 let new_head = commit
10536 .parents()
10537 .inspect(|parnet| {
10538 parnet.message();
10539 })
10540 .nth(offset)
10541 .expect("Not enough history");
10542 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
10543 .expect("Could not reset");
10544}
10545
/// Creates a branch named `name` pointing at the current HEAD commit,
/// panicking on failure.
// NOTE: `#[cfg(any())]` never evaluates to true, so this helper is currently
// compiled out; it is kept for tests that may be re-enabled later.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously read "Failed to commit",
    // copy-pasted from `git_commit`, which was misleading on failure.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
10556
/// Points HEAD at the reference named `name` and checks it out into the
/// working tree, panicking on failure.
// NOTE: `#[cfg(any())]` never evaluates to true, so this helper is currently
// compiled out; it is kept for tests that may be re-enabled later.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
10563
/// Snapshots the repository's per-path statuses into an owned map keyed by
/// path, panicking on failure.
// NOTE: `#[cfg(any())]` never evaluates to true, so this helper is currently
// compiled out; it is kept for tests that may be re-enabled later.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let mut statuses = collections::HashMap::default();
    for entry in repo.statuses(None).unwrap().iter() {
        statuses.insert(entry.path().unwrap().to_string(), entry.status());
    }
    statuses
}
10573
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling project roots under /root, opened as separate worktrees.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id so we can check that
    // found paths resolve to the right worktree below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves to a worktree-relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A nonexistent file still maps to a project path as long as its
        // parent directory lies inside a worktree.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}
10657
// Verifies that removing worktrees drops the associated git repositories and
// that the project's active repository falls back to a remaining one (and to
// none once all worktrees are gone).
#[gpui::test]
async fn test_git_worktree_remove(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two git repositories: /root/a and /root/b. /root/b/script is opened as
    // its own worktree but lives inside repository b.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/root"),
        json!({
            "a": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                }
            },
            "b": {
                ".git": {},
                "src": {
                    "main.rs": "fn main() {}",
                },
                "script": {
                    "run.sh": "#!/bin/bash"
                }
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/a").as_ref(),
            path!("/root/b/script").as_ref(),
            path!("/root/b").as_ref(),
        ],
        cx,
    )
    .await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    let worktrees = project.update(cx, |project, cx| project.worktrees(cx).collect::<Vec<_>>());
    assert_eq!(worktrees.len(), 3);

    // Index worktree ids by their absolute paths so removals below can look
    // them up regardless of worktree ordering.
    let worktree_id_by_abs_path = worktrees
        .into_iter()
        .map(|worktree| worktree.read_with(cx, |w, _| (w.abs_path(), w.id())))
        .collect::<HashMap<_, _>>();
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b/script")))
        .unwrap();

    // Three worktrees, but only two distinct repositories (a and b).
    let repos = project.update(cx, |p, cx| p.git_store().read(cx).repositories().clone());
    assert_eq!(repos.len(), 2);

    // Removing the nested script worktree must not drop repository b, since
    // the /root/b worktree still references it.
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let mut repo_paths = project
        .update(cx, |p, cx| p.git_store().read(cx).repositories().clone())
        .values()
        .map(|repo| repo.read_with(cx, |r, _| r.work_directory_abs_path.clone()))
        .collect::<Vec<_>>();
    repo_paths.sort();

    pretty_assertions::assert_eq!(
        repo_paths,
        [
            Path::new(path!("/root/a")).into(),
            Path::new(path!("/root/b")).into(),
        ]
    );

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/a")));

    // Removing worktree a should switch the active repository to b.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/a")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project
        .read_with(cx, |p, cx| {
            p.active_repository(cx)
                .map(|r| r.read(cx).work_directory_abs_path.clone())
        })
        .unwrap();
    assert_eq!(active_repo_path.as_ref(), Path::new(path!("/root/b")));

    // Removing the final worktree leaves the project with no active repository.
    let worktree_id = worktree_id_by_abs_path
        .get(Path::new(path!("/root/b")))
        .unwrap();
    project.update(cx, |project, cx| {
        project.remove_worktree(*worktree_id, cx);
    });
    cx.run_until_parked();

    let active_repo_path = project.read_with(cx, |p, cx| {
        p.active_repository(cx)
            .map(|r| r.read(cx).work_directory_abs_path.clone())
    });
    assert!(active_repo_path.is_none());
}