1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use async_trait::async_trait;
8use buffer_diff::{
9 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
10 DiffHunkStatusKind, assert_hunks,
11};
12use fs::FakeFs;
13use futures::{StreamExt, future};
14use git::{
15 GitHostingProviderRegistry,
16 repository::{RepoPath, repo_path},
17 status::{StatusCode, TrackedStatus},
18};
19use git2::RepositoryInitOptions;
20use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
24 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
25 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
26 ToolchainLister,
27 language_settings::{LanguageSettingsContent, language_settings},
28 tree_sitter_rust, tree_sitter_typescript,
29};
30use lsp::{
31 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
32 Uri, WillRenameFiles, notification::DidRenameFiles,
33};
34use parking_lot::Mutex;
35use paths::{config_dir, global_gitignore_path, tasks_file};
36use postage::stream::Stream as _;
37use pretty_assertions::{assert_eq, assert_matches};
38use rand::{Rng as _, rngs::StdRng};
39use serde_json::json;
40#[cfg(not(windows))]
41use std::os;
42use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
43use task::{ResolvedTask, ShellKind, TaskContext};
44use unindent::Unindent as _;
45use util::{
46 TryFutureExt as _, assert_set_eq, maybe, path,
47 paths::PathMatcher,
48 rel_path::rel_path,
49 test::{TempTree, marked_text_offsets},
50 uri,
51};
52use worktree::WorktreeModelHandle as _;
53
54#[gpui::test]
55async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
56 cx.executor().allow_parking();
57
58 let (tx, mut rx) = futures::channel::mpsc::unbounded();
59 let _thread = std::thread::spawn(move || {
60 #[cfg(not(target_os = "windows"))]
61 std::fs::metadata("/tmp").unwrap();
62 #[cfg(target_os = "windows")]
63 std::fs::metadata("C:/Windows").unwrap();
64 std::thread::sleep(Duration::from_millis(1000));
65 tx.unbounded_send(1).unwrap();
66 });
67 rx.next().await.unwrap();
68}
69
70#[gpui::test]
71async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
72 cx.executor().allow_parking();
73
74 let io_task = smol::unblock(move || {
75 println!("sleeping on thread {:?}", std::thread::current().id());
76 std::thread::sleep(Duration::from_millis(10));
77 1
78 });
79
80 let task = cx.foreground_executor().spawn(async move {
81 io_task.await;
82 });
83
84 task.await;
85}
86
87#[cfg(not(windows))]
88#[gpui::test]
89async fn test_symlinks(cx: &mut gpui::TestAppContext) {
90 init_test(cx);
91 cx.executor().allow_parking();
92
93 let dir = TempTree::new(json!({
94 "root": {
95 "apple": "",
96 "banana": {
97 "carrot": {
98 "date": "",
99 "endive": "",
100 }
101 },
102 "fennel": {
103 "grape": "",
104 }
105 }
106 }));
107
108 let root_link_path = dir.path().join("root_link");
109 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
110 os::unix::fs::symlink(
111 dir.path().join("root/fennel"),
112 dir.path().join("root/finnochio"),
113 )
114 .unwrap();
115
116 let project = Project::test(
117 Arc::new(RealFs::new(None, cx.executor())),
118 [root_link_path.as_ref()],
119 cx,
120 )
121 .await;
122
123 project.update(cx, |project, cx| {
124 let tree = project.worktrees(cx).next().unwrap().read(cx);
125 assert_eq!(tree.file_count(), 5);
126 assert_eq!(
127 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
128 tree.entry_for_path(rel_path("finnochio/grape"))
129 .unwrap()
130 .inode
131 );
132 });
133}
134
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies editor-settings precedence: .editorconfig overrides
    // .zed/settings.json, a nested .editorconfig overrides its parent,
    // "tab_width" is used when "indent_size" is absent, and "off" values
    // fall back to the .zed settings.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    // Mirror the real temp tree into the fake FS so the project reads from it.
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
233
234#[gpui::test]
235async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
236 init_test(cx);
237 cx.update(|cx| {
238 GitHostingProviderRegistry::default_global(cx);
239 git_hosting_providers::init(cx);
240 });
241
242 let fs = FakeFs::new(cx.executor());
243 let str_path = path!("/dir");
244 let path = Path::new(str_path);
245
246 fs.insert_tree(
247 path!("/dir"),
248 json!({
249 ".zed": {
250 "settings.json": r#"{
251 "git_hosting_providers": [
252 {
253 "provider": "gitlab",
254 "base_url": "https://google.com",
255 "name": "foo"
256 }
257 ]
258 }"#
259 },
260 }),
261 )
262 .await;
263
264 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
265 let (_worktree, _) =
266 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
267 cx.executor().run_until_parked();
268
269 cx.update(|cx| {
270 let provider = GitHostingProviderRegistry::global(cx);
271 assert!(
272 provider
273 .list_hosting_providers()
274 .into_iter()
275 .any(|provider| provider.name() == "foo")
276 );
277 });
278
279 fs.atomic_write(
280 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
281 "{}".into(),
282 )
283 .await
284 .unwrap();
285
286 cx.run_until_parked();
287
288 cx.update(|cx| {
289 let provider = GitHostingProviderRegistry::global(cx);
290 assert!(
291 !provider
292 .list_hosting_providers()
293 .into_iter()
294 .any(|provider| provider.name() == "foo")
295 );
296 });
297}
298
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Verifies that per-directory .zed/settings.json and .zed/tasks.json files
    // are honored: nested settings override outer ones, worktree tasks from
    // all directories are listed, and after scheduling a task plus adding a
    // global tasks file, the ordering becomes: most recently scheduled first,
    // then remaining worktree tasks, then global tasks.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree context only.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks coming from the root-level .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings check: a/ inherits the root tab_size (8), b/ overrides it (2).
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks.json files contribute a task.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root-level task as most recently scheduled, and install a
    // global tasks.json with one extra task.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        // NOTE(review): this message says "global" but the task found here is
        // the topmost *worktree* task — the wording looks stale; confirm.
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // After scheduling: the scheduled task sorts first, the other worktree
    // task follows, and the global task comes last, with its env preserved.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
499
500#[gpui::test]
501async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
502 init_test(cx);
503 TaskStore::init(None);
504
505 let fs = FakeFs::new(cx.executor());
506 fs.insert_tree(
507 path!("/dir"),
508 json!({
509 ".zed": {
510 "tasks.json": r#"[{
511 "label": "test worktree root",
512 "command": "echo $ZED_WORKTREE_ROOT"
513 }]"#,
514 },
515 "a": {
516 "a.rs": "fn a() {\n A\n}"
517 },
518 }),
519 )
520 .await;
521
522 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
523 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
524
525 cx.executor().run_until_parked();
526 let worktree_id = cx.update(|cx| {
527 project.update(cx, |project, cx| {
528 project.worktrees(cx).next().unwrap().read(cx).id()
529 })
530 });
531
532 let active_non_worktree_item_tasks = cx
533 .update(|cx| {
534 get_all_tasks(
535 &project,
536 Arc::new(TaskContexts {
537 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
538 active_worktree_context: None,
539 other_worktree_contexts: Vec::new(),
540 lsp_task_sources: HashMap::default(),
541 latest_selection: None,
542 }),
543 cx,
544 )
545 })
546 .await;
547 assert!(
548 active_non_worktree_item_tasks.is_empty(),
549 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
550 );
551
552 let active_worktree_tasks = cx
553 .update(|cx| {
554 get_all_tasks(
555 &project,
556 Arc::new(TaskContexts {
557 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
558 active_worktree_context: Some((worktree_id, {
559 let mut worktree_context = TaskContext::default();
560 worktree_context
561 .task_variables
562 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
563 worktree_context
564 })),
565 other_worktree_contexts: Vec::new(),
566 lsp_task_sources: HashMap::default(),
567 latest_selection: None,
568 }),
569 cx,
570 )
571 })
572 .await;
573 assert_eq!(
574 active_worktree_tasks
575 .into_iter()
576 .map(|(source_kind, task)| {
577 let resolved = task.resolved;
578 (source_kind, resolved.command.unwrap())
579 })
580 .collect::<Vec<_>>(),
581 vec![(
582 TaskSourceKind::Worktree {
583 id: worktree_id,
584 directory_in_worktree: rel_path(".zed").into(),
585 id_base: "local worktree tasks from directory \".zed\"".into(),
586 },
587 "echo /dir".to_string(),
588 )]
589 );
590}
591
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Two Python subprojects in one worktree share a single language server
    // instance until a subproject-specific toolchain is activated; activating
    // one causes a second server instance to be started for that subproject.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up from `path` (at most `depth` ancestors) and return the first
        // directory containing a pyproject.toml file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Each subproject has its own pyproject.toml (the rooting point) and .venv.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's pyproject.toml directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
793
794#[gpui::test]
795async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
796 init_test(cx);
797
798 let fs = FakeFs::new(cx.executor());
799 fs.insert_tree(
800 path!("/dir"),
801 json!({
802 "test.rs": "const A: i32 = 1;",
803 "test2.rs": "",
804 "Cargo.toml": "a = 1",
805 "package.json": "{\"a\": 1}",
806 }),
807 )
808 .await;
809
810 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
811 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
812
813 let mut fake_rust_servers = language_registry.register_fake_lsp(
814 "Rust",
815 FakeLspAdapter {
816 name: "the-rust-language-server",
817 capabilities: lsp::ServerCapabilities {
818 completion_provider: Some(lsp::CompletionOptions {
819 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
820 ..Default::default()
821 }),
822 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
823 lsp::TextDocumentSyncOptions {
824 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
825 ..Default::default()
826 },
827 )),
828 ..Default::default()
829 },
830 ..Default::default()
831 },
832 );
833 let mut fake_json_servers = language_registry.register_fake_lsp(
834 "JSON",
835 FakeLspAdapter {
836 name: "the-json-language-server",
837 capabilities: lsp::ServerCapabilities {
838 completion_provider: Some(lsp::CompletionOptions {
839 trigger_characters: Some(vec![":".to_string()]),
840 ..Default::default()
841 }),
842 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
843 lsp::TextDocumentSyncOptions {
844 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
845 ..Default::default()
846 },
847 )),
848 ..Default::default()
849 },
850 ..Default::default()
851 },
852 );
853
854 // Open a buffer without an associated language server.
855 let (toml_buffer, _handle) = project
856 .update(cx, |project, cx| {
857 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
858 })
859 .await
860 .unwrap();
861
862 // Open a buffer with an associated language server before the language for it has been loaded.
863 let (rust_buffer, _handle2) = project
864 .update(cx, |project, cx| {
865 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
866 })
867 .await
868 .unwrap();
869 rust_buffer.update(cx, |buffer, _| {
870 assert_eq!(buffer.language().map(|l| l.name()), None);
871 });
872
873 // Now we add the languages to the project, and ensure they get assigned to all
874 // the relevant open buffers.
875 language_registry.add(json_lang());
876 language_registry.add(rust_lang());
877 cx.executor().run_until_parked();
878 rust_buffer.update(cx, |buffer, _| {
879 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
880 });
881
882 // A server is started up, and it is notified about Rust files.
883 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
884 assert_eq!(
885 fake_rust_server
886 .receive_notification::<lsp::notification::DidOpenTextDocument>()
887 .await
888 .text_document,
889 lsp::TextDocumentItem {
890 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
891 version: 0,
892 text: "const A: i32 = 1;".to_string(),
893 language_id: "rust".to_string(),
894 }
895 );
896
897 // The buffer is configured based on the language server's capabilities.
898 rust_buffer.update(cx, |buffer, _| {
899 assert_eq!(
900 buffer
901 .completion_triggers()
902 .iter()
903 .cloned()
904 .collect::<Vec<_>>(),
905 &[".".to_string(), "::".to_string()]
906 );
907 });
908 toml_buffer.update(cx, |buffer, _| {
909 assert!(buffer.completion_triggers().is_empty());
910 });
911
912 // Edit a buffer. The changes are reported to the language server.
913 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
914 assert_eq!(
915 fake_rust_server
916 .receive_notification::<lsp::notification::DidChangeTextDocument>()
917 .await
918 .text_document,
919 lsp::VersionedTextDocumentIdentifier::new(
920 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
921 1
922 )
923 );
924
925 // Open a third buffer with a different associated language server.
926 let (json_buffer, _json_handle) = project
927 .update(cx, |project, cx| {
928 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
929 })
930 .await
931 .unwrap();
932
933 // A json language server is started up and is only notified about the json buffer.
934 let mut fake_json_server = fake_json_servers.next().await.unwrap();
935 assert_eq!(
936 fake_json_server
937 .receive_notification::<lsp::notification::DidOpenTextDocument>()
938 .await
939 .text_document,
940 lsp::TextDocumentItem {
941 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
942 version: 0,
943 text: "{\"a\": 1}".to_string(),
944 language_id: "json".to_string(),
945 }
946 );
947
948 // This buffer is configured based on the second language server's
949 // capabilities.
950 json_buffer.update(cx, |buffer, _| {
951 assert_eq!(
952 buffer
953 .completion_triggers()
954 .iter()
955 .cloned()
956 .collect::<Vec<_>>(),
957 &[":".to_string()]
958 );
959 });
960
961 // When opening another buffer whose language server is already running,
962 // it is also configured based on the existing language server's capabilities.
963 let (rust_buffer2, _handle4) = project
964 .update(cx, |project, cx| {
965 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
966 })
967 .await
968 .unwrap();
969 rust_buffer2.update(cx, |buffer, _| {
970 assert_eq!(
971 buffer
972 .completion_triggers()
973 .iter()
974 .cloned()
975 .collect::<Vec<_>>(),
976 &[".".to_string(), "::".to_string()]
977 );
978 });
979
980 // Changes are reported only to servers matching the buffer's language.
981 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
982 rust_buffer2.update(cx, |buffer, cx| {
983 buffer.edit([(0..0, "let x = 1;")], None, cx)
984 });
985 assert_eq!(
986 fake_rust_server
987 .receive_notification::<lsp::notification::DidChangeTextDocument>()
988 .await
989 .text_document,
990 lsp::VersionedTextDocumentIdentifier::new(
991 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
992 1
993 )
994 );
995
996 // Save notifications are reported to all servers.
997 project
998 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
999 .await
1000 .unwrap();
1001 assert_eq!(
1002 fake_rust_server
1003 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1004 .await
1005 .text_document,
1006 lsp::TextDocumentIdentifier::new(
1007 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1008 )
1009 );
1010 assert_eq!(
1011 fake_json_server
1012 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1013 .await
1014 .text_document,
1015 lsp::TextDocumentIdentifier::new(
1016 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1017 )
1018 );
1019
1020 // Renames are reported only to servers matching the buffer's language.
1021 fs.rename(
1022 Path::new(path!("/dir/test2.rs")),
1023 Path::new(path!("/dir/test3.rs")),
1024 Default::default(),
1025 )
1026 .await
1027 .unwrap();
1028 assert_eq!(
1029 fake_rust_server
1030 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1031 .await
1032 .text_document,
1033 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1034 );
1035 assert_eq!(
1036 fake_rust_server
1037 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1038 .await
1039 .text_document,
1040 lsp::TextDocumentItem {
1041 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1042 version: 0,
1043 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1044 language_id: "rust".to_string(),
1045 },
1046 );
1047
1048 rust_buffer2.update(cx, |buffer, cx| {
1049 buffer.update_diagnostics(
1050 LanguageServerId(0),
1051 DiagnosticSet::from_sorted_entries(
1052 vec![DiagnosticEntry {
1053 diagnostic: Default::default(),
1054 range: Anchor::MIN..Anchor::MAX,
1055 }],
1056 &buffer.snapshot(),
1057 ),
1058 cx,
1059 );
1060 assert_eq!(
1061 buffer
1062 .snapshot()
1063 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1064 .count(),
1065 1
1066 );
1067 });
1068
1069 // When the rename changes the extension of the file, the buffer gets closed on the old
1070 // language server and gets opened on the new one.
1071 fs.rename(
1072 Path::new(path!("/dir/test3.rs")),
1073 Path::new(path!("/dir/test3.json")),
1074 Default::default(),
1075 )
1076 .await
1077 .unwrap();
1078 assert_eq!(
1079 fake_rust_server
1080 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1081 .await
1082 .text_document,
1083 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1084 );
1085 assert_eq!(
1086 fake_json_server
1087 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1088 .await
1089 .text_document,
1090 lsp::TextDocumentItem {
1091 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1092 version: 0,
1093 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1094 language_id: "json".to_string(),
1095 },
1096 );
1097
1098 // We clear the diagnostics, since the language has changed.
1099 rust_buffer2.update(cx, |buffer, _| {
1100 assert_eq!(
1101 buffer
1102 .snapshot()
1103 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1104 .count(),
1105 0
1106 );
1107 });
1108
1109 // The renamed file's version resets after changing language server.
1110 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1111 assert_eq!(
1112 fake_json_server
1113 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1114 .await
1115 .text_document,
1116 lsp::VersionedTextDocumentIdentifier::new(
1117 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1118 1
1119 )
1120 );
1121
1122 // Restart language servers
1123 project.update(cx, |project, cx| {
1124 project.restart_language_servers_for_buffers(
1125 vec![rust_buffer.clone(), json_buffer.clone()],
1126 HashSet::default(),
1127 cx,
1128 );
1129 });
1130
1131 let mut rust_shutdown_requests = fake_rust_server
1132 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1133 let mut json_shutdown_requests = fake_json_server
1134 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1135 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1136
1137 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1138 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1139
1140 // Ensure rust document is reopened in new rust language server
1141 assert_eq!(
1142 fake_rust_server
1143 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1144 .await
1145 .text_document,
1146 lsp::TextDocumentItem {
1147 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1148 version: 0,
1149 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1150 language_id: "rust".to_string(),
1151 }
1152 );
1153
1154 // Ensure json documents are reopened in new json language server
1155 assert_set_eq!(
1156 [
1157 fake_json_server
1158 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1159 .await
1160 .text_document,
1161 fake_json_server
1162 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1163 .await
1164 .text_document,
1165 ],
1166 [
1167 lsp::TextDocumentItem {
1168 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1169 version: 0,
1170 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1171 language_id: "json".to_string(),
1172 },
1173 lsp::TextDocumentItem {
1174 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1175 version: 0,
1176 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1177 language_id: "json".to_string(),
1178 }
1179 ]
1180 );
1181
1182 // Close notifications are reported only to servers matching the buffer's language.
1183 cx.update(|_| drop(_json_handle));
1184 let close_message = lsp::DidCloseTextDocumentParams {
1185 text_document: lsp::TextDocumentIdentifier::new(
1186 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1187 ),
1188 };
1189 assert_eq!(
1190 fake_json_server
1191 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1192 .await,
1193 close_message,
1194 );
1195}
1196
1197#[gpui::test]
1198async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1199 init_test(cx);
1200
1201 let fs = FakeFs::new(cx.executor());
1202 fs.insert_tree(
1203 path!("/the-root"),
1204 json!({
1205 ".gitignore": "target\n",
1206 "Cargo.lock": "",
1207 "src": {
1208 "a.rs": "",
1209 "b.rs": "",
1210 },
1211 "target": {
1212 "x": {
1213 "out": {
1214 "x.rs": ""
1215 }
1216 },
1217 "y": {
1218 "out": {
1219 "y.rs": "",
1220 }
1221 },
1222 "z": {
1223 "out": {
1224 "z.rs": ""
1225 }
1226 }
1227 }
1228 }),
1229 )
1230 .await;
1231 fs.insert_tree(
1232 path!("/the-registry"),
1233 json!({
1234 "dep1": {
1235 "src": {
1236 "dep1.rs": "",
1237 }
1238 },
1239 "dep2": {
1240 "src": {
1241 "dep2.rs": "",
1242 }
1243 },
1244 }),
1245 )
1246 .await;
1247 fs.insert_tree(
1248 path!("/the/stdlib"),
1249 json!({
1250 "LICENSE": "",
1251 "src": {
1252 "string.rs": "",
1253 }
1254 }),
1255 )
1256 .await;
1257
1258 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1259 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1260 (project.languages().clone(), project.lsp_store())
1261 });
1262 language_registry.add(rust_lang());
1263 let mut fake_servers = language_registry.register_fake_lsp(
1264 "Rust",
1265 FakeLspAdapter {
1266 name: "the-language-server",
1267 ..Default::default()
1268 },
1269 );
1270
1271 cx.executor().run_until_parked();
1272
1273 // Start the language server by opening a buffer with a compatible file extension.
1274 project
1275 .update(cx, |project, cx| {
1276 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1277 })
1278 .await
1279 .unwrap();
1280
1281 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1282 project.update(cx, |project, cx| {
1283 let worktree = project.worktrees(cx).next().unwrap();
1284 assert_eq!(
1285 worktree
1286 .read(cx)
1287 .snapshot()
1288 .entries(true, 0)
1289 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1290 .collect::<Vec<_>>(),
1291 &[
1292 ("", false),
1293 (".gitignore", false),
1294 ("Cargo.lock", false),
1295 ("src", false),
1296 ("src/a.rs", false),
1297 ("src/b.rs", false),
1298 ("target", true),
1299 ]
1300 );
1301 });
1302
1303 let prev_read_dir_count = fs.read_dir_call_count();
1304
1305 let fake_server = fake_servers.next().await.unwrap();
1306 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1307 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1308 id
1309 });
1310
1311 // Simulate jumping to a definition in a dependency outside of the worktree.
1312 let _out_of_worktree_buffer = project
1313 .update(cx, |project, cx| {
1314 project.open_local_buffer_via_lsp(
1315 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1316 server_id,
1317 cx,
1318 )
1319 })
1320 .await
1321 .unwrap();
1322
1323 // Keep track of the FS events reported to the language server.
1324 let file_changes = Arc::new(Mutex::new(Vec::new()));
1325 fake_server
1326 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1327 registrations: vec![lsp::Registration {
1328 id: Default::default(),
1329 method: "workspace/didChangeWatchedFiles".to_string(),
1330 register_options: serde_json::to_value(
1331 lsp::DidChangeWatchedFilesRegistrationOptions {
1332 watchers: vec![
1333 lsp::FileSystemWatcher {
1334 glob_pattern: lsp::GlobPattern::String(
1335 path!("/the-root/Cargo.toml").to_string(),
1336 ),
1337 kind: None,
1338 },
1339 lsp::FileSystemWatcher {
1340 glob_pattern: lsp::GlobPattern::String(
1341 path!("/the-root/src/*.{rs,c}").to_string(),
1342 ),
1343 kind: None,
1344 },
1345 lsp::FileSystemWatcher {
1346 glob_pattern: lsp::GlobPattern::String(
1347 path!("/the-root/target/y/**/*.rs").to_string(),
1348 ),
1349 kind: None,
1350 },
1351 lsp::FileSystemWatcher {
1352 glob_pattern: lsp::GlobPattern::String(
1353 path!("/the/stdlib/src/**/*.rs").to_string(),
1354 ),
1355 kind: None,
1356 },
1357 lsp::FileSystemWatcher {
1358 glob_pattern: lsp::GlobPattern::String(
1359 path!("**/Cargo.lock").to_string(),
1360 ),
1361 kind: None,
1362 },
1363 ],
1364 },
1365 )
1366 .ok(),
1367 }],
1368 })
1369 .await
1370 .into_response()
1371 .unwrap();
1372 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1373 let file_changes = file_changes.clone();
1374 move |params, _| {
1375 let mut file_changes = file_changes.lock();
1376 file_changes.extend(params.changes);
1377 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1378 }
1379 });
1380
1381 cx.executor().run_until_parked();
1382 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1383 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1384
1385 let mut new_watched_paths = fs.watched_paths();
1386 new_watched_paths.retain(|path| {
1387 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1388 });
1389 assert_eq!(
1390 &new_watched_paths,
1391 &[
1392 Path::new(path!("/the-root")),
1393 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1394 Path::new(path!("/the/stdlib/src"))
1395 ]
1396 );
1397
1398 // Now the language server has asked us to watch an ignored directory path,
1399 // so we recursively load it.
1400 project.update(cx, |project, cx| {
1401 let worktree = project.visible_worktrees(cx).next().unwrap();
1402 assert_eq!(
1403 worktree
1404 .read(cx)
1405 .snapshot()
1406 .entries(true, 0)
1407 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1408 .collect::<Vec<_>>(),
1409 &[
1410 ("", false),
1411 (".gitignore", false),
1412 ("Cargo.lock", false),
1413 ("src", false),
1414 ("src/a.rs", false),
1415 ("src/b.rs", false),
1416 ("target", true),
1417 ("target/x", true),
1418 ("target/y", true),
1419 ("target/y/out", true),
1420 ("target/y/out/y.rs", true),
1421 ("target/z", true),
1422 ]
1423 );
1424 });
1425
1426 // Perform some file system mutations, two of which match the watched patterns,
1427 // and one of which does not.
1428 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1429 .await
1430 .unwrap();
1431 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1432 .await
1433 .unwrap();
1434 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1435 .await
1436 .unwrap();
1437 fs.create_file(
1438 path!("/the-root/target/x/out/x2.rs").as_ref(),
1439 Default::default(),
1440 )
1441 .await
1442 .unwrap();
1443 fs.create_file(
1444 path!("/the-root/target/y/out/y2.rs").as_ref(),
1445 Default::default(),
1446 )
1447 .await
1448 .unwrap();
1449 fs.save(
1450 path!("/the-root/Cargo.lock").as_ref(),
1451 &"".into(),
1452 Default::default(),
1453 )
1454 .await
1455 .unwrap();
1456 fs.save(
1457 path!("/the-stdlib/LICENSE").as_ref(),
1458 &"".into(),
1459 Default::default(),
1460 )
1461 .await
1462 .unwrap();
1463 fs.save(
1464 path!("/the/stdlib/src/string.rs").as_ref(),
1465 &"".into(),
1466 Default::default(),
1467 )
1468 .await
1469 .unwrap();
1470
1471 // The language server receives events for the FS mutations that match its watch patterns.
1472 cx.executor().run_until_parked();
1473 assert_eq!(
1474 &*file_changes.lock(),
1475 &[
1476 lsp::FileEvent {
1477 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1478 typ: lsp::FileChangeType::CHANGED,
1479 },
1480 lsp::FileEvent {
1481 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1482 typ: lsp::FileChangeType::DELETED,
1483 },
1484 lsp::FileEvent {
1485 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1486 typ: lsp::FileChangeType::CREATED,
1487 },
1488 lsp::FileEvent {
1489 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1490 typ: lsp::FileChangeType::CREATED,
1491 },
1492 lsp::FileEvent {
1493 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1494 typ: lsp::FileChangeType::CHANGED,
1495 },
1496 ]
1497 );
1498}
1499
1500#[gpui::test]
1501async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1502 init_test(cx);
1503
1504 let fs = FakeFs::new(cx.executor());
1505 fs.insert_tree(
1506 path!("/dir"),
1507 json!({
1508 "a.rs": "let a = 1;",
1509 "b.rs": "let b = 2;"
1510 }),
1511 )
1512 .await;
1513
1514 let project = Project::test(
1515 fs,
1516 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1517 cx,
1518 )
1519 .await;
1520 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1521
1522 let buffer_a = project
1523 .update(cx, |project, cx| {
1524 project.open_local_buffer(path!("/dir/a.rs"), cx)
1525 })
1526 .await
1527 .unwrap();
1528 let buffer_b = project
1529 .update(cx, |project, cx| {
1530 project.open_local_buffer(path!("/dir/b.rs"), cx)
1531 })
1532 .await
1533 .unwrap();
1534
1535 lsp_store.update(cx, |lsp_store, cx| {
1536 lsp_store
1537 .update_diagnostics(
1538 LanguageServerId(0),
1539 lsp::PublishDiagnosticsParams {
1540 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1541 version: None,
1542 diagnostics: vec![lsp::Diagnostic {
1543 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1544 severity: Some(lsp::DiagnosticSeverity::ERROR),
1545 message: "error 1".to_string(),
1546 ..Default::default()
1547 }],
1548 },
1549 None,
1550 DiagnosticSourceKind::Pushed,
1551 &[],
1552 cx,
1553 )
1554 .unwrap();
1555 lsp_store
1556 .update_diagnostics(
1557 LanguageServerId(0),
1558 lsp::PublishDiagnosticsParams {
1559 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1560 version: None,
1561 diagnostics: vec![lsp::Diagnostic {
1562 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1563 severity: Some(DiagnosticSeverity::WARNING),
1564 message: "error 2".to_string(),
1565 ..Default::default()
1566 }],
1567 },
1568 None,
1569 DiagnosticSourceKind::Pushed,
1570 &[],
1571 cx,
1572 )
1573 .unwrap();
1574 });
1575
1576 buffer_a.update(cx, |buffer, _| {
1577 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1578 assert_eq!(
1579 chunks
1580 .iter()
1581 .map(|(s, d)| (s.as_str(), *d))
1582 .collect::<Vec<_>>(),
1583 &[
1584 ("let ", None),
1585 ("a", Some(DiagnosticSeverity::ERROR)),
1586 (" = 1;", None),
1587 ]
1588 );
1589 });
1590 buffer_b.update(cx, |buffer, _| {
1591 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1592 assert_eq!(
1593 chunks
1594 .iter()
1595 .map(|(s, d)| (s.as_str(), *d))
1596 .collect::<Vec<_>>(),
1597 &[
1598 ("let ", None),
1599 ("b", Some(DiagnosticSeverity::WARNING)),
1600 (" = 2;", None),
1601 ]
1602 );
1603 });
1604}
1605
1606#[gpui::test]
1607async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1608 init_test(cx);
1609
1610 let fs = FakeFs::new(cx.executor());
1611 fs.insert_tree(
1612 path!("/root"),
1613 json!({
1614 "dir": {
1615 ".git": {
1616 "HEAD": "ref: refs/heads/main",
1617 },
1618 ".gitignore": "b.rs",
1619 "a.rs": "let a = 1;",
1620 "b.rs": "let b = 2;",
1621 },
1622 "other.rs": "let b = c;"
1623 }),
1624 )
1625 .await;
1626
1627 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1628 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1629 let (worktree, _) = project
1630 .update(cx, |project, cx| {
1631 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1632 })
1633 .await
1634 .unwrap();
1635 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1636
1637 let (worktree, _) = project
1638 .update(cx, |project, cx| {
1639 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1640 })
1641 .await
1642 .unwrap();
1643 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1644
1645 let server_id = LanguageServerId(0);
1646 lsp_store.update(cx, |lsp_store, cx| {
1647 lsp_store
1648 .update_diagnostics(
1649 server_id,
1650 lsp::PublishDiagnosticsParams {
1651 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1652 version: None,
1653 diagnostics: vec![lsp::Diagnostic {
1654 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1655 severity: Some(lsp::DiagnosticSeverity::ERROR),
1656 message: "unused variable 'b'".to_string(),
1657 ..Default::default()
1658 }],
1659 },
1660 None,
1661 DiagnosticSourceKind::Pushed,
1662 &[],
1663 cx,
1664 )
1665 .unwrap();
1666 lsp_store
1667 .update_diagnostics(
1668 server_id,
1669 lsp::PublishDiagnosticsParams {
1670 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1671 version: None,
1672 diagnostics: vec![lsp::Diagnostic {
1673 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1674 severity: Some(lsp::DiagnosticSeverity::ERROR),
1675 message: "unknown variable 'c'".to_string(),
1676 ..Default::default()
1677 }],
1678 },
1679 None,
1680 DiagnosticSourceKind::Pushed,
1681 &[],
1682 cx,
1683 )
1684 .unwrap();
1685 });
1686
1687 let main_ignored_buffer = project
1688 .update(cx, |project, cx| {
1689 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1690 })
1691 .await
1692 .unwrap();
1693 main_ignored_buffer.update(cx, |buffer, _| {
1694 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1695 assert_eq!(
1696 chunks
1697 .iter()
1698 .map(|(s, d)| (s.as_str(), *d))
1699 .collect::<Vec<_>>(),
1700 &[
1701 ("let ", None),
1702 ("b", Some(DiagnosticSeverity::ERROR)),
1703 (" = 2;", None),
1704 ],
1705 "Gigitnored buffers should still get in-buffer diagnostics",
1706 );
1707 });
1708 let other_buffer = project
1709 .update(cx, |project, cx| {
1710 project.open_buffer((other_worktree_id, rel_path("")), cx)
1711 })
1712 .await
1713 .unwrap();
1714 other_buffer.update(cx, |buffer, _| {
1715 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1716 assert_eq!(
1717 chunks
1718 .iter()
1719 .map(|(s, d)| (s.as_str(), *d))
1720 .collect::<Vec<_>>(),
1721 &[
1722 ("let b = ", None),
1723 ("c", Some(DiagnosticSeverity::ERROR)),
1724 (";", None),
1725 ],
1726 "Buffers from hidden projects should still get in-buffer diagnostics"
1727 );
1728 });
1729
1730 project.update(cx, |project, cx| {
1731 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1732 assert_eq!(
1733 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1734 vec![(
1735 ProjectPath {
1736 worktree_id: main_worktree_id,
1737 path: rel_path("b.rs").into(),
1738 },
1739 server_id,
1740 DiagnosticSummary {
1741 error_count: 1,
1742 warning_count: 0,
1743 }
1744 )]
1745 );
1746 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1747 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1748 });
1749}
1750
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token under which the fake server reports its disk-based diagnostics
    // progress; it drives the DiskBasedDiagnosticsStarted/Finished events.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Observe project events from here on; the assertions below check their
    // exact order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token surfaces as a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics for an unopened file still emits a
    // DiagnosticsUpdated event for that path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress closes the disk-based diagnostics cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer now exposes the previously-published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second empty publish is a no-op: no further event is emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1887
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token under which the fake adapter reports disk-based diagnostics progress.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The old server (id 0) is removed and a new one (id 1) is added.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server shows as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1988
1989#[gpui::test]
1990async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1991 init_test(cx);
1992
1993 let fs = FakeFs::new(cx.executor());
1994 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1995
1996 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1997
1998 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1999 language_registry.add(rust_lang());
2000 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2001
2002 let (buffer, _) = project
2003 .update(cx, |project, cx| {
2004 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2005 })
2006 .await
2007 .unwrap();
2008
2009 // Publish diagnostics
2010 let fake_server = fake_servers.next().await.unwrap();
2011 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2012 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2013 version: None,
2014 diagnostics: vec![lsp::Diagnostic {
2015 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2016 severity: Some(lsp::DiagnosticSeverity::ERROR),
2017 message: "the message".to_string(),
2018 ..Default::default()
2019 }],
2020 });
2021
2022 cx.executor().run_until_parked();
2023 buffer.update(cx, |buffer, _| {
2024 assert_eq!(
2025 buffer
2026 .snapshot()
2027 .diagnostics_in_range::<_, usize>(0..1, false)
2028 .map(|entry| entry.diagnostic.message.clone())
2029 .collect::<Vec<_>>(),
2030 ["the message".to_string()]
2031 );
2032 });
2033 project.update(cx, |project, cx| {
2034 assert_eq!(
2035 project.diagnostic_summary(false, cx),
2036 DiagnosticSummary {
2037 error_count: 1,
2038 warning_count: 0,
2039 }
2040 );
2041 });
2042
2043 project.update(cx, |project, cx| {
2044 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2045 });
2046
2047 // The diagnostics are cleared.
2048 cx.executor().run_until_parked();
2049 buffer.update(cx, |buffer, _| {
2050 assert_eq!(
2051 buffer
2052 .snapshot()
2053 .diagnostics_in_range::<_, usize>(0..1, false)
2054 .map(|entry| entry.diagnostic.message.clone())
2055 .collect::<Vec<_>>(),
2056 Vec::<String>::new(),
2057 );
2058 });
2059 project.update(cx, |project, cx| {
2060 assert_eq!(
2061 project.diagnostic_summary(false, cx),
2062 DiagnosticSummary {
2063 error_count: 0,
2064 warning_count: 0,
2065 }
2066 );
2067 });
2068}
2069
2070#[gpui::test]
2071async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2072 init_test(cx);
2073
2074 let fs = FakeFs::new(cx.executor());
2075 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2076
2077 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2078 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2079
2080 language_registry.add(rust_lang());
2081 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2082
2083 let (buffer, _handle) = project
2084 .update(cx, |project, cx| {
2085 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2086 })
2087 .await
2088 .unwrap();
2089
2090 // Before restarting the server, report diagnostics with an unknown buffer version.
2091 let fake_server = fake_servers.next().await.unwrap();
2092 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2093 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2094 version: Some(10000),
2095 diagnostics: Vec::new(),
2096 });
2097 cx.executor().run_until_parked();
2098 project.update(cx, |project, cx| {
2099 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2100 });
2101
2102 let mut fake_server = fake_servers.next().await.unwrap();
2103 let notification = fake_server
2104 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2105 .await
2106 .text_document;
2107 assert_eq!(notification.version, 0);
2108}
2109
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token for the work item that is marked cancellable below.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Start two work items: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    // Request cancellation of the server's in-flight work for this buffer.
    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable work item receives a WorkDoneProgressCancel;
    // the non-cancellable "another-token" item does not.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2174
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // One Rust file and one JavaScript file, each served by its own fake server.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding language server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server receives an Exit notification; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance reopens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // Meanwhile the JS server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2292
// Exercises translation of published LSP diagnostics into buffer coordinates:
// diagnostics expressed against an older document version are shifted across
// the user's later edits, overlapping diagnostics are both retained, and
// diagnostics published in non-ascending position order are handled.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Registering "disk" as a disk-based diagnostics source makes the entries
    // below come through with `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows 0-2 in the published version are now rows 2-4; querying rows
        // 3-5 yields only the 'BB' and 'CCC' entries.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Both overlapping diagnostics are retained; the wider warning comes
        // first. Group ids keep incrementing across successive publishes.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Entries come back sorted by position regardless of publish order.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2584
2585#[gpui::test]
2586async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2587 init_test(cx);
2588
2589 let text = concat!(
2590 "let one = ;\n", //
2591 "let two = \n",
2592 "let three = 3;\n",
2593 );
2594
2595 let fs = FakeFs::new(cx.executor());
2596 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2597
2598 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2599 let buffer = project
2600 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2601 .await
2602 .unwrap();
2603
2604 project.update(cx, |project, cx| {
2605 project.lsp_store.update(cx, |lsp_store, cx| {
2606 lsp_store
2607 .update_diagnostic_entries(
2608 LanguageServerId(0),
2609 PathBuf::from("/dir/a.rs"),
2610 None,
2611 None,
2612 vec![
2613 DiagnosticEntry {
2614 range: Unclipped(PointUtf16::new(0, 10))
2615 ..Unclipped(PointUtf16::new(0, 10)),
2616 diagnostic: Diagnostic {
2617 severity: DiagnosticSeverity::ERROR,
2618 message: "syntax error 1".to_string(),
2619 source_kind: DiagnosticSourceKind::Pushed,
2620 ..Diagnostic::default()
2621 },
2622 },
2623 DiagnosticEntry {
2624 range: Unclipped(PointUtf16::new(1, 10))
2625 ..Unclipped(PointUtf16::new(1, 10)),
2626 diagnostic: Diagnostic {
2627 severity: DiagnosticSeverity::ERROR,
2628 message: "syntax error 2".to_string(),
2629 source_kind: DiagnosticSourceKind::Pushed,
2630 ..Diagnostic::default()
2631 },
2632 },
2633 ],
2634 cx,
2635 )
2636 .unwrap();
2637 })
2638 });
2639
2640 // An empty range is extended forward to include the following character.
2641 // At the end of a line, an empty range is extended backward to include
2642 // the preceding character.
2643 buffer.update(cx, |buffer, _| {
2644 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2645 assert_eq!(
2646 chunks
2647 .iter()
2648 .map(|(s, d)| (s.as_str(), *d))
2649 .collect::<Vec<_>>(),
2650 &[
2651 ("let one = ", None),
2652 (";", Some(DiagnosticSeverity::ERROR)),
2653 ("\nlet two =", None),
2654 (" ", Some(DiagnosticSeverity::ERROR)),
2655 ("\nlet three = 3;\n", None)
2656 ]
2657 );
2658 });
2659}
2660
2661#[gpui::test]
2662async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2663 init_test(cx);
2664
2665 let fs = FakeFs::new(cx.executor());
2666 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2667 .await;
2668
2669 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2670 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2671
2672 lsp_store.update(cx, |lsp_store, cx| {
2673 lsp_store
2674 .update_diagnostic_entries(
2675 LanguageServerId(0),
2676 Path::new("/dir/a.rs").to_owned(),
2677 None,
2678 None,
2679 vec![DiagnosticEntry {
2680 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2681 diagnostic: Diagnostic {
2682 severity: DiagnosticSeverity::ERROR,
2683 is_primary: true,
2684 message: "syntax error a1".to_string(),
2685 source_kind: DiagnosticSourceKind::Pushed,
2686 ..Diagnostic::default()
2687 },
2688 }],
2689 cx,
2690 )
2691 .unwrap();
2692 lsp_store
2693 .update_diagnostic_entries(
2694 LanguageServerId(1),
2695 Path::new("/dir/a.rs").to_owned(),
2696 None,
2697 None,
2698 vec![DiagnosticEntry {
2699 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2700 diagnostic: Diagnostic {
2701 severity: DiagnosticSeverity::ERROR,
2702 is_primary: true,
2703 message: "syntax error b1".to_string(),
2704 source_kind: DiagnosticSourceKind::Pushed,
2705 ..Diagnostic::default()
2706 },
2707 }],
2708 cx,
2709 )
2710 .unwrap();
2711
2712 assert_eq!(
2713 lsp_store.diagnostic_summary(false, cx),
2714 DiagnosticSummary {
2715 error_count: 2,
2716 warning_count: 0,
2717 }
2718 );
2719 });
2720}
2721
// Verifies that `edits_from_lsp` interprets edit coordinates against the
// document version the server last saw (here, the version sent at `didOpen`)
// and transforms them through the user's later edits, so applying them
// preserves those intervening changes.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server saw at open; the LSP edits below are
    // expressed in this version's coordinates.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits yields the server's intended result
    // while keeping the user's comments added after the snapshot version.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2876
// Verifies that a whole-file diff expressed as LSP edits (the shape
// rust-analyzer produces for its merge-imports code action) is reduced by
// `edits_from_lsp` to only the genuinely changed spans.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The huge diff collapses to two minimal edits: the import-list
        // rewrite and a one-line deletion.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2987
2988#[gpui::test]
2989async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2990 cx: &mut gpui::TestAppContext,
2991) {
2992 init_test(cx);
2993
2994 let text = "Path()";
2995
2996 let fs = FakeFs::new(cx.executor());
2997 fs.insert_tree(
2998 path!("/dir"),
2999 json!({
3000 "a.rs": text
3001 }),
3002 )
3003 .await;
3004
3005 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3006 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3007 let buffer = project
3008 .update(cx, |project, cx| {
3009 project.open_local_buffer(path!("/dir/a.rs"), cx)
3010 })
3011 .await
3012 .unwrap();
3013
3014 // Simulate the language server sending us a pair of edits at the same location,
3015 // with an insertion following a replacement (which violates the LSP spec).
3016 let edits = lsp_store
3017 .update(cx, |lsp_store, cx| {
3018 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3019 &buffer,
3020 [
3021 lsp::TextEdit {
3022 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3023 new_text: "Path".into(),
3024 },
3025 lsp::TextEdit {
3026 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3027 new_text: "from path import Path\n\n\n".into(),
3028 },
3029 ],
3030 LanguageServerId(0),
3031 None,
3032 cx,
3033 )
3034 })
3035 .await
3036 .unwrap();
3037
3038 buffer.update(cx, |buffer, cx| {
3039 buffer.edit(edits, None, cx);
3040 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3041 });
3042}
3043
// Verifies that `edits_from_lsp` tolerates malformed server output: edits in
// non-ascending order, an inverted range, and an end position far past the
// end of the file — and still produces normalized, minimal edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start comes after end.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position well beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the computed edits come out sorted,
        // clipped to the buffer, and minimal.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3150
3151fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3152 buffer: &Buffer,
3153 range: Range<T>,
3154) -> Vec<(String, Option<DiagnosticSeverity>)> {
3155 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3156 for chunk in buffer.snapshot().chunks(range, true) {
3157 if chunks
3158 .last()
3159 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3160 {
3161 chunks.last_mut().unwrap().0.push_str(chunk.text);
3162 } else {
3163 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3164 }
3165 }
3166 chunks
3167}
3168
// Verifies go-to-definition: the request carries the correct document and
// position, the out-of-project target file is loaded into a new *invisible*
// worktree, and that worktree goes away once the last handle to the
// definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at the definition of `a` in the out-of-project file.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // The definition target was loaded into a new, invisible worktree.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path along with whether it is visible.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3267
// Verifies that when a completion item carries a `text_edit`, its new text
// and range are used for the completion, taking precedence over both
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The server's edit replaces the trailing "fqn" (last three characters).
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's text and range, not the
    // label or insert_text.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3350
// Completion items may omit a per-item `text_edit` and instead rely on the
// list-level default edit range (`CompletionList.itemDefaults.editRange`,
// LSP 3.17). This test verifies how the resulting completion text and replace
// range are derived in that case, both when the item has an `insert_text`
// and when only the `label` is available.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request first; the fake server's handler is
        // installed below, and `.next().await` waits until the server has
        // received (and answered) this request.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    // The default edit range covers the last 3 characters
                    // of the line ("fqn").
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` wins over `label`, and the replace range comes from
        // the list-level default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither `text_edit` nor `insert_text`, the `label` itself is
        // used as the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3486
// Counterpart to `test_completions_with_edit_ranges`: when the server provides
// no default edit range at all, the replace range must be inferred from the
// text around the cursor (e.g. the word being completed).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // The request is initiated here; the handler below answers it, and
    // `.next().await` waits for the server to have received the request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred replace range covers the partial word "fqn" before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is placed just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The replace range covers "cmp" (3 chars ending one char before the
    // closing quote) — the path segment being completed inside the string.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3592
// Servers may send completion text containing `\r` or `\r\n`. This verifies
// that such line endings are normalized to `\n` in the resulting completion's
// `new_text` (buffers use `\n` internally).
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Deliberately mixes bare `\r` and `\r\n` endings.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` collapse to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3660
// Exercises the full command-based code-action flow:
//   1. the server returns a code action with `data` but no edits,
//   2. resolving it yields a `command` (still no edits),
//   3. executing the command makes the server send a `workspace/applyEdit`
//      request back to the editor,
// and the resulting edit must show up in the returned project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // Advertise lazy resolution so the editor must resolve the
                // action before applying it.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit is undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3802
// Renaming a file into a directory path that doesn't exist yet must create
// the whole intermediate directory hierarchy, preserve the file's contents,
// and also work when later moving into an already-existing directory.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move into a directory chain that does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Re-resolve the entry id after the first move, then move the file up
    // into `dir1/dir2`, which already exists at this point.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3910
3911#[gpui::test(iterations = 10)]
3912async fn test_save_file(cx: &mut gpui::TestAppContext) {
3913 init_test(cx);
3914
3915 let fs = FakeFs::new(cx.executor());
3916 fs.insert_tree(
3917 path!("/dir"),
3918 json!({
3919 "file1": "the old contents",
3920 }),
3921 )
3922 .await;
3923
3924 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3925 let buffer = project
3926 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3927 .await
3928 .unwrap();
3929 buffer.update(cx, |buffer, cx| {
3930 assert_eq!(buffer.text(), "the old contents");
3931 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3932 });
3933
3934 project
3935 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3936 .await
3937 .unwrap();
3938
3939 let new_text = fs
3940 .load(Path::new(path!("/dir/file1")))
3941 .await
3942 .unwrap()
3943 .replace("\r\n", "\n");
3944 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3945}
3946
// Regression test (issue #24349): an untitled buffer has no language servers,
// but once it is saved-as to a path whose extension matches a registered
// language, the corresponding server must be started and notified about the
// newly-saved file.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled (non-file-backed) buffer.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            // No path yet, so no language and no servers.
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            // After the save, the buffer is associated with the new server.
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4026
4027#[gpui::test(iterations = 30)]
4028async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4029 init_test(cx);
4030
4031 let fs = FakeFs::new(cx.executor());
4032 fs.insert_tree(
4033 path!("/dir"),
4034 json!({
4035 "file1": "the original contents",
4036 }),
4037 )
4038 .await;
4039
4040 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4041 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4042 let buffer = project
4043 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4044 .await
4045 .unwrap();
4046
4047 // Simulate buffer diffs being slow, so that they don't complete before
4048 // the next file change occurs.
4049 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4050
4051 // Change the buffer's file on disk, and then wait for the file change
4052 // to be detected by the worktree, so that the buffer starts reloading.
4053 fs.save(
4054 path!("/dir/file1").as_ref(),
4055 &"the first contents".into(),
4056 Default::default(),
4057 )
4058 .await
4059 .unwrap();
4060 worktree.next_event(cx).await;
4061
4062 // Change the buffer's file again. Depending on the random seed, the
4063 // previous file change may still be in progress.
4064 fs.save(
4065 path!("/dir/file1").as_ref(),
4066 &"the second contents".into(),
4067 Default::default(),
4068 )
4069 .await
4070 .unwrap();
4071 worktree.next_event(cx).await;
4072
4073 cx.executor().run_until_parked();
4074 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4075 buffer.read_with(cx, |buffer, _| {
4076 assert_eq!(buffer.text(), on_disk_text);
4077 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4078 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4079 });
4080}
4081
4082#[gpui::test(iterations = 30)]
4083async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4084 init_test(cx);
4085
4086 let fs = FakeFs::new(cx.executor());
4087 fs.insert_tree(
4088 path!("/dir"),
4089 json!({
4090 "file1": "the original contents",
4091 }),
4092 )
4093 .await;
4094
4095 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4096 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4097 let buffer = project
4098 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4099 .await
4100 .unwrap();
4101
4102 // Simulate buffer diffs being slow, so that they don't complete before
4103 // the next file change occurs.
4104 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4105
4106 // Change the buffer's file on disk, and then wait for the file change
4107 // to be detected by the worktree, so that the buffer starts reloading.
4108 fs.save(
4109 path!("/dir/file1").as_ref(),
4110 &"the first contents".into(),
4111 Default::default(),
4112 )
4113 .await
4114 .unwrap();
4115 worktree.next_event(cx).await;
4116
4117 cx.executor()
4118 .spawn(cx.executor().simulate_random_delay())
4119 .await;
4120
4121 // Perform a noop edit, causing the buffer's version to increase.
4122 buffer.update(cx, |buffer, cx| {
4123 buffer.edit([(0..0, " ")], None, cx);
4124 buffer.undo(cx);
4125 });
4126
4127 cx.executor().run_until_parked();
4128 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4129 buffer.read_with(cx, |buffer, _| {
4130 let buffer_text = buffer.text();
4131 if buffer_text == on_disk_text {
4132 assert!(
4133 !buffer.is_dirty() && !buffer.has_conflict(),
4134 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4135 );
4136 }
4137 // If the file change occurred while the buffer was processing the first
4138 // change, the buffer will be in a conflicting state.
4139 else {
4140 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4141 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4142 }
4143 });
4144}
4145
// Same as `test_save_file`, but the project's worktree is rooted at the file
// itself (a single-file worktree) rather than at its parent directory.
#[gpui::test]
async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "the old contents",
        }),
    )
    .await;

    // Note: the worktree root is the file, not the directory.
    let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
    });

    project
        .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
        .await
        .unwrap();

    // Normalize Windows line endings before comparing against the buffer.
    let new_text = fs
        .load(Path::new(path!("/dir/file1")))
        .await
        .unwrap()
        .replace("\r\n", "\n");
    assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
}
4180
// Save-as on an untitled buffer: the file must be written to disk, the buffer
// must become clean and pick up the language matching its new extension, and
// re-opening the path must return the very same buffer entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        // Untitled buffers start dirty once edited and default to Plain Text.
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension causes the Rust language to be assigned.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must dedupe to the existing buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4234
4235#[gpui::test(retries = 5)]
4236async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
4237 use worktree::WorktreeModelHandle as _;
4238
4239 init_test(cx);
4240 cx.executor().allow_parking();
4241
4242 let dir = TempTree::new(json!({
4243 "a": {
4244 "file1": "",
4245 "file2": "",
4246 "file3": "",
4247 },
4248 "b": {
4249 "c": {
4250 "file4": "",
4251 "file5": "",
4252 }
4253 }
4254 }));
4255
4256 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;
4257
4258 let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4259 let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
4260 async move { buffer.await.unwrap() }
4261 };
4262 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4263 project.update(cx, |project, cx| {
4264 let tree = project.worktrees(cx).next().unwrap();
4265 tree.read(cx)
4266 .entry_for_path(rel_path(path))
4267 .unwrap_or_else(|| panic!("no entry for path {}", path))
4268 .id
4269 })
4270 };
4271
4272 let buffer2 = buffer_for_path("a/file2", cx).await;
4273 let buffer3 = buffer_for_path("a/file3", cx).await;
4274 let buffer4 = buffer_for_path("b/c/file4", cx).await;
4275 let buffer5 = buffer_for_path("b/c/file5", cx).await;
4276
4277 let file2_id = id_for_path("a/file2", cx);
4278 let file3_id = id_for_path("a/file3", cx);
4279 let file4_id = id_for_path("b/c/file4", cx);
4280
4281 // Create a remote copy of this worktree.
4282 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4283 let metadata = tree.update(cx, |tree, _| tree.metadata_proto());
4284
4285 let updates = Arc::new(Mutex::new(Vec::new()));
4286 tree.update(cx, |tree, cx| {
4287 let updates = updates.clone();
4288 tree.observe_updates(0, cx, move |update| {
4289 updates.lock().push(update);
4290 async { true }
4291 });
4292 });
4293
4294 let remote = cx.update(|cx| {
4295 Worktree::remote(
4296 0,
4297 1,
4298 metadata,
4299 project.read(cx).client().into(),
4300 project.read(cx).path_style(cx),
4301 cx,
4302 )
4303 });
4304
4305 cx.executor().run_until_parked();
4306
4307 cx.update(|cx| {
4308 assert!(!buffer2.read(cx).is_dirty());
4309 assert!(!buffer3.read(cx).is_dirty());
4310 assert!(!buffer4.read(cx).is_dirty());
4311 assert!(!buffer5.read(cx).is_dirty());
4312 });
4313
4314 // Rename and delete files and directories.
4315 tree.flush_fs_events(cx).await;
4316 std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
4317 std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
4318 std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
4319 std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
4320 tree.flush_fs_events(cx).await;
4321
4322 cx.update(|app| {
4323 assert_eq!(
4324 tree.read(app).paths().collect::<Vec<_>>(),
4325 vec![
4326 rel_path("a"),
4327 rel_path("a/file1"),
4328 rel_path("a/file2.new"),
4329 rel_path("b"),
4330 rel_path("d"),
4331 rel_path("d/file3"),
4332 rel_path("d/file4"),
4333 ]
4334 );
4335 });
4336
4337 assert_eq!(id_for_path("a/file2.new", cx), file2_id);
4338 assert_eq!(id_for_path("d/file3", cx), file3_id);
4339 assert_eq!(id_for_path("d/file4", cx), file4_id);
4340
4341 cx.update(|cx| {
4342 assert_eq!(
4343 buffer2.read(cx).file().unwrap().path().as_ref(),
4344 rel_path("a/file2.new")
4345 );
4346 assert_eq!(
4347 buffer3.read(cx).file().unwrap().path().as_ref(),
4348 rel_path("d/file3")
4349 );
4350 assert_eq!(
4351 buffer4.read(cx).file().unwrap().path().as_ref(),
4352 rel_path("d/file4")
4353 );
4354 assert_eq!(
4355 buffer5.read(cx).file().unwrap().path().as_ref(),
4356 rel_path("b/c/file5")
4357 );
4358
4359 assert_matches!(
4360 buffer2.read(cx).file().unwrap().disk_state(),
4361 DiskState::Present { .. }
4362 );
4363 assert_matches!(
4364 buffer3.read(cx).file().unwrap().disk_state(),
4365 DiskState::Present { .. }
4366 );
4367 assert_matches!(
4368 buffer4.read(cx).file().unwrap().disk_state(),
4369 DiskState::Present { .. }
4370 );
4371 assert_eq!(
4372 buffer5.read(cx).file().unwrap().disk_state(),
4373 DiskState::Deleted
4374 );
4375 });
4376
4377 // Update the remote worktree. Check that it becomes consistent with the
4378 // local worktree.
4379 cx.executor().run_until_parked();
4380
4381 remote.update(cx, |remote, _| {
4382 for update in updates.lock().drain(..) {
4383 remote.as_remote_mut().unwrap().update_from_remote(update);
4384 }
4385 });
4386 cx.executor().run_until_parked();
4387 remote.update(cx, |remote, _| {
4388 assert_eq!(
4389 remote.paths().collect::<Vec<_>>(),
4390 vec![
4391 rel_path("a"),
4392 rel_path("a/file1"),
4393 rel_path("a/file2.new"),
4394 rel_path("b"),
4395 rel_path("d"),
4396 rel_path("d/file3"),
4397 rel_path("d/file4"),
4398 ]
4399 );
4400 });
4401}
4402
// Renaming a directory must preserve the entry ids of the directory and its
// children, and a buffer opened from inside that directory must stay valid
// (and clean) across the rename.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory `a` -> `b`.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the file keep their original entry ids.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4456
4457#[gpui::test]
4458async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4459 init_test(cx);
4460
4461 let fs = FakeFs::new(cx.executor());
4462 fs.insert_tree(
4463 "/dir",
4464 json!({
4465 "a.txt": "a-contents",
4466 "b.txt": "b-contents",
4467 }),
4468 )
4469 .await;
4470
4471 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4472
4473 // Spawn multiple tasks to open paths, repeating some paths.
4474 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4475 (
4476 p.open_local_buffer("/dir/a.txt", cx),
4477 p.open_local_buffer("/dir/b.txt", cx),
4478 p.open_local_buffer("/dir/a.txt", cx),
4479 )
4480 });
4481
4482 let buffer_a_1 = buffer_a_1.await.unwrap();
4483 let buffer_a_2 = buffer_a_2.await.unwrap();
4484 let buffer_b = buffer_b.await.unwrap();
4485 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4486 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4487
4488 // There is only one buffer per path.
4489 let buffer_a_id = buffer_a_1.entity_id();
4490 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4491
4492 // Open the same path again while it is still open.
4493 drop(buffer_a_1);
4494 let buffer_a_3 = project
4495 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4496 .await
4497 .unwrap();
4498
4499 // There's still only one buffer per path.
4500 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4501}
4502
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    // Exercises the buffer dirty-state machine and its events: edits dirty a
    // buffer, saving or restoring the saved text cleans it, and deleting the
    // backing file interacts with dirtiness without spurious events.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        // Record every buffer event except Operation, which is noise here.
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and disk mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first of the two edits flips the dirty bit, so only
        // one DirtyChanged is interleaved between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Deletion alone only changes the file handle; the buffer stays clean.
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    // Dirty the buffer first, then delete the file underneath it.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4684
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Covers reload-on-disk-change behavior: a clean buffer is reloaded (with
    // anchors remapped through the diff of old vs. new contents), while a
    // dirty buffer is left alone and instead flagged as being in conflict.
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset so we can verify anchors survive
    // the reload and land at the corresponding marked positions in the new text.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4767
4768#[gpui::test]
4769async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4770 init_test(cx);
4771
4772 let fs = FakeFs::new(cx.executor());
4773 fs.insert_tree(
4774 path!("/dir"),
4775 json!({
4776 "file1": "a\nb\nc\n",
4777 "file2": "one\r\ntwo\r\nthree\r\n",
4778 }),
4779 )
4780 .await;
4781
4782 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4783 let buffer1 = project
4784 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4785 .await
4786 .unwrap();
4787 let buffer2 = project
4788 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4789 .await
4790 .unwrap();
4791
4792 buffer1.update(cx, |buffer, _| {
4793 assert_eq!(buffer.text(), "a\nb\nc\n");
4794 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4795 });
4796 buffer2.update(cx, |buffer, _| {
4797 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4798 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4799 });
4800
4801 // Change a file's line endings on disk from unix to windows. The buffer's
4802 // state updates correctly.
4803 fs.save(
4804 path!("/dir/file1").as_ref(),
4805 &"aaa\nb\nc\n".into(),
4806 LineEnding::Windows,
4807 )
4808 .await
4809 .unwrap();
4810 cx.executor().run_until_parked();
4811 buffer1.update(cx, |buffer, _| {
4812 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4813 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4814 });
4815
4816 // Save a file with windows line endings. The file is written correctly.
4817 buffer2.update(cx, |buffer, cx| {
4818 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4819 });
4820 project
4821 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4822 .await
4823 .unwrap();
4824 assert_eq!(
4825 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4826 "one\r\ntwo\r\nthree\r\nfour\r\n",
4827 );
4828}
4829
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics are grouped: hint-severity entries
    // that reference a primary diagnostic via related_information share the
    // primary's group id, and diagnostic_group() returns a group's entries in
    // range order with exactly one is_primary entry.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    // Five diagnostics: a warning ("error 1") with one hint, and an error
    // ("error 2") with two hints. Hints point back at their primary through
    // related_information, and primaries list their hints the same way.
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by range. Hints carry the group id of the
    // primary they reference ("error 2" is group 0, "error 1" is group 1).
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints, in range order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5089
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-rename LSP protocol flow: for a server that registered
    // willRename/didRename file-operation filters, renaming a worktree entry
    // sends workspace/willRenameFiles (whose returned WorkspaceEdit is
    // resolved) and then a workspace/didRenameFiles notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters the server registers for: *.rs files and all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename one.rs -> three.rs; it blocks on willRenameFiles.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once by the willRenameFiles handler so we can assert it actually ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server receives didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5225
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    // Exercises symbol rename via LSP: prepare_rename returns the symbol's
    // range, and perform_rename applies the server's multi-file WorkspaceEdit,
    // returning a transaction covering every edited buffer.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepare_rename at offset 7 (inside "ONE") should yield the symbol range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // perform_rename: the fake server answers with edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction contains both edited buffers: one.rs (already open) and
    // two.rs (opened by the rename to apply its edits).
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5365
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Project-wide text search returns, per matching file, the byte ranges of
    // each match; unsaved buffer edits are searched instead of disk contents.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so it now contains "TWO" twice.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query now also matches the dirty, unsaved buffer contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5442
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Search with an inclusion PathMatcher only visits files matching at least
    // one of the inclusion globs; non-matching globs simply contribute nothing.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5566
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Search with an exclusion PathMatcher skips files matching any of the
    // exclusion globs; non-matching globs exclude nothing.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5690
5691#[gpui::test]
5692async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5693 init_test(cx);
5694
5695 let search_query = "file";
5696
5697 let fs = FakeFs::new(cx.executor());
5698 fs.insert_tree(
5699 path!("/dir"),
5700 json!({
5701 "one.rs": r#"// Rust file one"#,
5702 "one.ts": r#"// TypeScript file one"#,
5703 "two.rs": r#"// Rust file two"#,
5704 "two.ts": r#"// TypeScript file two"#,
5705 }),
5706 )
5707 .await;
5708
5709 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5710 let path_style = PathStyle::local();
5711 let _buffer = project.update(cx, |project, cx| {
5712 project.create_local_buffer("file", None, false, cx)
5713 });
5714
5715 assert_eq!(
5716 search(
5717 &project,
5718 SearchQuery::text(
5719 search_query,
5720 false,
5721 true,
5722 false,
5723 Default::default(),
5724 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5725 false,
5726 None,
5727 )
5728 .unwrap(),
5729 cx
5730 )
5731 .await
5732 .unwrap(),
5733 HashMap::from_iter([
5734 (path!("dir/one.rs").to_string(), vec![8..12]),
5735 (path!("dir/one.ts").to_string(), vec![14..18]),
5736 (path!("dir/two.rs").to_string(), vec![8..12]),
5737 (path!("dir/two.ts").to_string(), vec![14..18]),
5738 ]),
5739 "If no exclusions match, all files should be returned"
5740 );
5741
5742 assert_eq!(
5743 search(
5744 &project,
5745 SearchQuery::text(
5746 search_query,
5747 false,
5748 true,
5749 false,
5750 Default::default(),
5751 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5752 false,
5753 None,
5754 )
5755 .unwrap(),
5756 cx
5757 )
5758 .await
5759 .unwrap(),
5760 HashMap::from_iter([
5761 (path!("dir/one.ts").to_string(), vec![14..18]),
5762 (path!("dir/two.ts").to_string(), vec![14..18]),
5763 ]),
5764 "Rust exclusion search should give only TypeScript files"
5765 );
5766
5767 assert_eq!(
5768 search(
5769 &project,
5770 SearchQuery::text(
5771 search_query,
5772 false,
5773 true,
5774 false,
5775 Default::default(),
5776 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5777 false,
5778 None,
5779 )
5780 .unwrap(),
5781 cx
5782 )
5783 .await
5784 .unwrap(),
5785 HashMap::from_iter([
5786 (path!("dir/one.rs").to_string(), vec![8..12]),
5787 (path!("dir/two.rs").to_string(), vec![8..12]),
5788 ]),
5789 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5790 );
5791
5792 assert!(
5793 search(
5794 &project,
5795 SearchQuery::text(
5796 search_query,
5797 false,
5798 true,
5799 false,
5800 Default::default(),
5801 PathMatcher::new(
5802 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5803 PathStyle::local(),
5804 )
5805 .unwrap(),
5806 false,
5807 None,
5808 )
5809 .unwrap(),
5810 cx
5811 )
5812 .await
5813 .unwrap()
5814 .is_empty(),
5815 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5816 );
5817}
5818
5819#[gpui::test]
5820async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5821 init_test(cx);
5822
5823 let search_query = "file";
5824
5825 let fs = FakeFs::new(cx.executor());
5826 fs.insert_tree(
5827 path!("/dir"),
5828 json!({
5829 "one.rs": r#"// Rust file one"#,
5830 "one.ts": r#"// TypeScript file one"#,
5831 "two.rs": r#"// Rust file two"#,
5832 "two.ts": r#"// TypeScript file two"#,
5833 }),
5834 )
5835 .await;
5836 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5837 assert!(
5838 search(
5839 &project,
5840 SearchQuery::text(
5841 search_query,
5842 false,
5843 true,
5844 false,
5845 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5846 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5847 false,
5848 None,
5849 )
5850 .unwrap(),
5851 cx
5852 )
5853 .await
5854 .unwrap()
5855 .is_empty(),
5856 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5857 );
5858
5859 assert!(
5860 search(
5861 &project,
5862 SearchQuery::text(
5863 search_query,
5864 false,
5865 true,
5866 false,
5867 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5868 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5869 false,
5870 None,
5871 )
5872 .unwrap(),
5873 cx
5874 )
5875 .await
5876 .unwrap()
5877 .is_empty(),
5878 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5879 );
5880
5881 assert!(
5882 search(
5883 &project,
5884 SearchQuery::text(
5885 search_query,
5886 false,
5887 true,
5888 false,
5889 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5890 .unwrap(),
5891 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5892 .unwrap(),
5893 false,
5894 None,
5895 )
5896 .unwrap(),
5897 cx
5898 )
5899 .await
5900 .unwrap()
5901 .is_empty(),
5902 "Non-matching inclusions and exclusions should not change that."
5903 );
5904
5905 assert_eq!(
5906 search(
5907 &project,
5908 SearchQuery::text(
5909 search_query,
5910 false,
5911 true,
5912 false,
5913 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5914 .unwrap(),
5915 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
5916 .unwrap(),
5917 false,
5918 None,
5919 )
5920 .unwrap(),
5921 cx
5922 )
5923 .await
5924 .unwrap(),
5925 HashMap::from_iter([
5926 (path!("dir/one.ts").to_string(), vec![14..18]),
5927 (path!("dir/two.ts").to_string(), vec![14..18]),
5928 ]),
5929 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5930 );
5931}
5932
5933#[gpui::test]
5934async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5935 init_test(cx);
5936
5937 let fs = FakeFs::new(cx.executor());
5938 fs.insert_tree(
5939 path!("/worktree-a"),
5940 json!({
5941 "haystack.rs": r#"// NEEDLE"#,
5942 "haystack.ts": r#"// NEEDLE"#,
5943 }),
5944 )
5945 .await;
5946 fs.insert_tree(
5947 path!("/worktree-b"),
5948 json!({
5949 "haystack.rs": r#"// NEEDLE"#,
5950 "haystack.ts": r#"// NEEDLE"#,
5951 }),
5952 )
5953 .await;
5954
5955 let path_style = PathStyle::local();
5956 let project = Project::test(
5957 fs.clone(),
5958 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5959 cx,
5960 )
5961 .await;
5962
5963 assert_eq!(
5964 search(
5965 &project,
5966 SearchQuery::text(
5967 "NEEDLE",
5968 false,
5969 true,
5970 false,
5971 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
5972 Default::default(),
5973 true,
5974 None,
5975 )
5976 .unwrap(),
5977 cx
5978 )
5979 .await
5980 .unwrap(),
5981 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5982 "should only return results from included worktree"
5983 );
5984 assert_eq!(
5985 search(
5986 &project,
5987 SearchQuery::text(
5988 "NEEDLE",
5989 false,
5990 true,
5991 false,
5992 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
5993 Default::default(),
5994 true,
5995 None,
5996 )
5997 .unwrap(),
5998 cx
5999 )
6000 .await
6001 .unwrap(),
6002 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6003 "should only return results from included worktree"
6004 );
6005
6006 assert_eq!(
6007 search(
6008 &project,
6009 SearchQuery::text(
6010 "NEEDLE",
6011 false,
6012 true,
6013 false,
6014 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6015 Default::default(),
6016 false,
6017 None,
6018 )
6019 .unwrap(),
6020 cx
6021 )
6022 .await
6023 .unwrap(),
6024 HashMap::from_iter([
6025 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6026 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6027 ]),
6028 "should return results from both worktrees"
6029 );
6030}
6031
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies that project search skips gitignored files by default, can be
    // asked to include them, and that include/exclude path filters still
    // apply to ignored files when it does.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            // Everything under `target/` and `node_modules/` is gitignored;
            // only the top-level `package.json` is tracked.
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Default query: ignored directories are not searched, so only the
    // top-level `package.json` can match.
    let query = "key";
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // Fresh project for a clean search state, same tree.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Identical query except the fourth flag is now `true`; the extra
    // `target/` and `node_modules/` results below show it makes the search
    // descend into gitignored entries.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include/exclude filters still apply while searching ignored files:
    // restrict to the ignored `prettier` directory, then exclude its `.ts`
    // file, leaving a single `package.json` match.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6156
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Verifies searching for non-ASCII (Cyrillic) text. All asserted ranges
    // are byte offsets: each Cyrillic letter is 2 bytes in UTF-8, so the
    // 6-letter word "привет" spans 12 bytes.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive query: only the lowercase occurrences should match.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive unicode query stays a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive query: should match both "привет" and "ПРИВЕТ".
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-insensitive unicode query is internally promoted to a regex
    // query (asserted here, presumably to get unicode-aware case folding).
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Query mixing unicode letters with regex-special punctuation ('.'):
    // still matched literally, case-insensitively (13 bytes: 12 + the dot).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6239
6240#[gpui::test]
6241async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6242 init_test(cx);
6243
6244 let fs = FakeFs::new(cx.executor());
6245 fs.insert_tree(
6246 "/one/two",
6247 json!({
6248 "three": {
6249 "a.txt": "",
6250 "four": {}
6251 },
6252 "c.rs": ""
6253 }),
6254 )
6255 .await;
6256
6257 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6258 project
6259 .update(cx, |project, cx| {
6260 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6261 project.create_entry((id, rel_path("b..")), true, cx)
6262 })
6263 .await
6264 .unwrap()
6265 .into_included()
6266 .unwrap();
6267
6268 assert_eq!(
6269 fs.paths(true),
6270 vec![
6271 PathBuf::from(path!("/")),
6272 PathBuf::from(path!("/one")),
6273 PathBuf::from(path!("/one/two")),
6274 PathBuf::from(path!("/one/two/c.rs")),
6275 PathBuf::from(path!("/one/two/three")),
6276 PathBuf::from(path!("/one/two/three/a.txt")),
6277 PathBuf::from(path!("/one/two/three/b..")),
6278 PathBuf::from(path!("/one/two/three/four")),
6279 ]
6280 );
6281}
6282
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Verifies that a hover request fans out to every language server
    // attached to the buffer that advertises hover capability, that `None`
    // responses are dropped, and that a server without the capability is
    // never queried at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four fake servers for the same language: three advertise hover support
    // (one of which will answer `None`), and one has no hover capability.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    // Explicitly no hover support; this server must never
                    // receive a hover request (its handler panics below).
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install a hover handler on each server as it comes up, keyed by name so
    // we can later await the handled requests.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two answer with real hover content naming the server.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // This one is queried but returns no hover; it must not appear in
            // the final results.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // No hover capability: receiving a request here is a test failure.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover, then drain every registered handler so all three
    // capable servers are known to have been queried before asserting.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content contribute hovers.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6437
6438#[gpui::test]
6439async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6440 init_test(cx);
6441
6442 let fs = FakeFs::new(cx.executor());
6443 fs.insert_tree(
6444 path!("/dir"),
6445 json!({
6446 "a.ts": "a",
6447 }),
6448 )
6449 .await;
6450
6451 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6452
6453 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6454 language_registry.add(typescript_lang());
6455 let mut fake_language_servers = language_registry.register_fake_lsp(
6456 "TypeScript",
6457 FakeLspAdapter {
6458 capabilities: lsp::ServerCapabilities {
6459 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6460 ..lsp::ServerCapabilities::default()
6461 },
6462 ..FakeLspAdapter::default()
6463 },
6464 );
6465
6466 let (buffer, _handle) = project
6467 .update(cx, |p, cx| {
6468 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6469 })
6470 .await
6471 .unwrap();
6472 cx.executor().run_until_parked();
6473
6474 let fake_server = fake_language_servers
6475 .next()
6476 .await
6477 .expect("failed to get the language server");
6478
6479 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6480 move |_, _| async move {
6481 Ok(Some(lsp::Hover {
6482 contents: lsp::HoverContents::Array(vec![
6483 lsp::MarkedString::String("".to_string()),
6484 lsp::MarkedString::String(" ".to_string()),
6485 lsp::MarkedString::String("\n\n\n".to_string()),
6486 ]),
6487 range: None,
6488 }))
6489 },
6490 );
6491
6492 let hover_task = project.update(cx, |project, cx| {
6493 project.hover(&buffer, Point::new(0, 0), cx)
6494 });
6495 let () = request_handled
6496 .next()
6497 .await
6498 .expect("All hover requests should have been triggered");
6499 assert_eq!(
6500 Vec::<String>::new(),
6501 hover_task
6502 .await
6503 .into_iter()
6504 .flatten()
6505 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6506 .sorted()
6507 .collect::<Vec<_>>(),
6508 "Empty hover parts should be ignored"
6509 );
6510}
6511
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that requesting code actions with an explicit `kinds` filter
    // returns only the actions of those kinds, even when the server replies
    // with additional ones.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always offers two actions of different kinds; the request
    // below asks for only one of the kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Wait until the fake server has actually served the request.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the requested kind survives the filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6590
6591#[gpui::test]
6592async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6593 init_test(cx);
6594
6595 let fs = FakeFs::new(cx.executor());
6596 fs.insert_tree(
6597 path!("/dir"),
6598 json!({
6599 "a.tsx": "a",
6600 }),
6601 )
6602 .await;
6603
6604 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6605
6606 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6607 language_registry.add(tsx_lang());
6608 let language_server_names = [
6609 "TypeScriptServer",
6610 "TailwindServer",
6611 "ESLintServer",
6612 "NoActionsCapabilitiesServer",
6613 ];
6614
6615 let mut language_server_rxs = [
6616 language_registry.register_fake_lsp(
6617 "tsx",
6618 FakeLspAdapter {
6619 name: language_server_names[0],
6620 capabilities: lsp::ServerCapabilities {
6621 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6622 ..lsp::ServerCapabilities::default()
6623 },
6624 ..FakeLspAdapter::default()
6625 },
6626 ),
6627 language_registry.register_fake_lsp(
6628 "tsx",
6629 FakeLspAdapter {
6630 name: language_server_names[1],
6631 capabilities: lsp::ServerCapabilities {
6632 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6633 ..lsp::ServerCapabilities::default()
6634 },
6635 ..FakeLspAdapter::default()
6636 },
6637 ),
6638 language_registry.register_fake_lsp(
6639 "tsx",
6640 FakeLspAdapter {
6641 name: language_server_names[2],
6642 capabilities: lsp::ServerCapabilities {
6643 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6644 ..lsp::ServerCapabilities::default()
6645 },
6646 ..FakeLspAdapter::default()
6647 },
6648 ),
6649 language_registry.register_fake_lsp(
6650 "tsx",
6651 FakeLspAdapter {
6652 name: language_server_names[3],
6653 capabilities: lsp::ServerCapabilities {
6654 code_action_provider: None,
6655 ..lsp::ServerCapabilities::default()
6656 },
6657 ..FakeLspAdapter::default()
6658 },
6659 ),
6660 ];
6661
6662 let (buffer, _handle) = project
6663 .update(cx, |p, cx| {
6664 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6665 })
6666 .await
6667 .unwrap();
6668 cx.executor().run_until_parked();
6669
6670 let mut servers_with_actions_requests = HashMap::default();
6671 for i in 0..language_server_names.len() {
6672 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6673 panic!(
6674 "Failed to get language server #{i} with name {}",
6675 &language_server_names[i]
6676 )
6677 });
6678 let new_server_name = new_server.server.name();
6679
6680 assert!(
6681 !servers_with_actions_requests.contains_key(&new_server_name),
6682 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6683 );
6684 match new_server_name.0.as_ref() {
6685 "TailwindServer" | "TypeScriptServer" => {
6686 servers_with_actions_requests.insert(
6687 new_server_name.clone(),
6688 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6689 move |_, _| {
6690 let name = new_server_name.clone();
6691 async move {
6692 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6693 lsp::CodeAction {
6694 title: format!("{name} code action"),
6695 ..lsp::CodeAction::default()
6696 },
6697 )]))
6698 }
6699 },
6700 ),
6701 );
6702 }
6703 "ESLintServer" => {
6704 servers_with_actions_requests.insert(
6705 new_server_name,
6706 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6707 |_, _| async move { Ok(None) },
6708 ),
6709 );
6710 }
6711 "NoActionsCapabilitiesServer" => {
6712 let _never_handled = new_server
6713 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6714 panic!(
6715 "Should not call for code actions server with no corresponding capabilities"
6716 )
6717 });
6718 }
6719 unexpected => panic!("Unexpected server name: {unexpected}"),
6720 }
6721 }
6722
6723 let code_actions_task = project.update(cx, |project, cx| {
6724 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6725 });
6726
6727 // cx.run_until_parked();
6728 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6729 |mut code_actions_request| async move {
6730 code_actions_request
6731 .next()
6732 .await
6733 .expect("All code actions requests should have been triggered")
6734 },
6735 ))
6736 .await;
6737 assert_eq!(
6738 vec!["TailwindServer code action", "TypeScriptServer code action"],
6739 code_actions_task
6740 .await
6741 .unwrap()
6742 .unwrap()
6743 .into_iter()
6744 .map(|code_action| code_action.lsp_action.title().to_owned())
6745 .sorted()
6746 .collect::<Vec<_>>(),
6747 "Should receive code actions responses from all related servers with hover capabilities"
6748 );
6749}
6750
6751#[gpui::test]
6752async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6753 init_test(cx);
6754
6755 let fs = FakeFs::new(cx.executor());
6756 fs.insert_tree(
6757 "/dir",
6758 json!({
6759 "a.rs": "let a = 1;",
6760 "b.rs": "let b = 2;",
6761 "c.rs": "let c = 2;",
6762 }),
6763 )
6764 .await;
6765
6766 let project = Project::test(
6767 fs,
6768 [
6769 "/dir/a.rs".as_ref(),
6770 "/dir/b.rs".as_ref(),
6771 "/dir/c.rs".as_ref(),
6772 ],
6773 cx,
6774 )
6775 .await;
6776
6777 // check the initial state and get the worktrees
6778 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6779 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6780 assert_eq!(worktrees.len(), 3);
6781
6782 let worktree_a = worktrees[0].read(cx);
6783 let worktree_b = worktrees[1].read(cx);
6784 let worktree_c = worktrees[2].read(cx);
6785
6786 // check they start in the right order
6787 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6788 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6789 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6790
6791 (
6792 worktrees[0].clone(),
6793 worktrees[1].clone(),
6794 worktrees[2].clone(),
6795 )
6796 });
6797
6798 // move first worktree to after the second
6799 // [a, b, c] -> [b, a, c]
6800 project
6801 .update(cx, |project, cx| {
6802 let first = worktree_a.read(cx);
6803 let second = worktree_b.read(cx);
6804 project.move_worktree(first.id(), second.id(), cx)
6805 })
6806 .expect("moving first after second");
6807
6808 // check the state after moving
6809 project.update(cx, |project, cx| {
6810 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6811 assert_eq!(worktrees.len(), 3);
6812
6813 let first = worktrees[0].read(cx);
6814 let second = worktrees[1].read(cx);
6815 let third = worktrees[2].read(cx);
6816
6817 // check they are now in the right order
6818 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6819 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6820 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6821 });
6822
6823 // move the second worktree to before the first
6824 // [b, a, c] -> [a, b, c]
6825 project
6826 .update(cx, |project, cx| {
6827 let second = worktree_a.read(cx);
6828 let first = worktree_b.read(cx);
6829 project.move_worktree(first.id(), second.id(), cx)
6830 })
6831 .expect("moving second before first");
6832
6833 // check the state after moving
6834 project.update(cx, |project, cx| {
6835 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6836 assert_eq!(worktrees.len(), 3);
6837
6838 let first = worktrees[0].read(cx);
6839 let second = worktrees[1].read(cx);
6840 let third = worktrees[2].read(cx);
6841
6842 // check they are now in the right order
6843 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6844 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6845 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6846 });
6847
6848 // move the second worktree to after the third
6849 // [a, b, c] -> [a, c, b]
6850 project
6851 .update(cx, |project, cx| {
6852 let second = worktree_b.read(cx);
6853 let third = worktree_c.read(cx);
6854 project.move_worktree(second.id(), third.id(), cx)
6855 })
6856 .expect("moving second after third");
6857
6858 // check the state after moving
6859 project.update(cx, |project, cx| {
6860 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6861 assert_eq!(worktrees.len(), 3);
6862
6863 let first = worktrees[0].read(cx);
6864 let second = worktrees[1].read(cx);
6865 let third = worktrees[2].read(cx);
6866
6867 // check they are now in the right order
6868 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6869 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6870 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6871 });
6872
6873 // move the third worktree to before the second
6874 // [a, c, b] -> [a, b, c]
6875 project
6876 .update(cx, |project, cx| {
6877 let third = worktree_c.read(cx);
6878 let second = worktree_b.read(cx);
6879 project.move_worktree(third.id(), second.id(), cx)
6880 })
6881 .expect("moving third before second");
6882
6883 // check the state after moving
6884 project.update(cx, |project, cx| {
6885 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6886 assert_eq!(worktrees.len(), 3);
6887
6888 let first = worktrees[0].read(cx);
6889 let second = worktrees[1].read(cx);
6890 let third = worktrees[2].read(cx);
6891
6892 // check they are now in the right order
6893 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6894 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6895 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6896 });
6897
6898 // move the first worktree to after the third
6899 // [a, b, c] -> [b, c, a]
6900 project
6901 .update(cx, |project, cx| {
6902 let first = worktree_a.read(cx);
6903 let third = worktree_c.read(cx);
6904 project.move_worktree(first.id(), third.id(), cx)
6905 })
6906 .expect("moving first after third");
6907
6908 // check the state after moving
6909 project.update(cx, |project, cx| {
6910 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6911 assert_eq!(worktrees.len(), 3);
6912
6913 let first = worktrees[0].read(cx);
6914 let second = worktrees[1].read(cx);
6915 let third = worktrees[2].read(cx);
6916
6917 // check they are now in the right order
6918 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6919 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6920 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6921 });
6922
6923 // move the third worktree to before the first
6924 // [b, c, a] -> [a, b, c]
6925 project
6926 .update(cx, |project, cx| {
6927 let third = worktree_a.read(cx);
6928 let first = worktree_b.read(cx);
6929 project.move_worktree(third.id(), first.id(), cx)
6930 })
6931 .expect("moving third before first");
6932
6933 // check the state after moving
6934 project.update(cx, |project, cx| {
6935 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6936 assert_eq!(worktrees.len(), 3);
6937
6938 let first = worktrees[0].read(cx);
6939 let second = worktrees[1].read(cx);
6940 let third = worktrees[2].read(cx);
6941
6942 // check they are now in the right order
6943 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6944 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6945 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6946 });
6947}
6948
6949#[gpui::test]
6950async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6951 init_test(cx);
6952
6953 let staged_contents = r#"
6954 fn main() {
6955 println!("hello world");
6956 }
6957 "#
6958 .unindent();
6959 let file_contents = r#"
6960 // print goodbye
6961 fn main() {
6962 println!("goodbye world");
6963 }
6964 "#
6965 .unindent();
6966
6967 let fs = FakeFs::new(cx.background_executor.clone());
6968 fs.insert_tree(
6969 "/dir",
6970 json!({
6971 ".git": {},
6972 "src": {
6973 "main.rs": file_contents,
6974 }
6975 }),
6976 )
6977 .await;
6978
6979 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
6980
6981 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6982
6983 let buffer = project
6984 .update(cx, |project, cx| {
6985 project.open_local_buffer("/dir/src/main.rs", cx)
6986 })
6987 .await
6988 .unwrap();
6989 let unstaged_diff = project
6990 .update(cx, |project, cx| {
6991 project.open_unstaged_diff(buffer.clone(), cx)
6992 })
6993 .await
6994 .unwrap();
6995
6996 cx.run_until_parked();
6997 unstaged_diff.update(cx, |unstaged_diff, cx| {
6998 let snapshot = buffer.read(cx).snapshot();
6999 assert_hunks(
7000 unstaged_diff.hunks(&snapshot, cx),
7001 &snapshot,
7002 &unstaged_diff.base_text_string().unwrap(),
7003 &[
7004 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7005 (
7006 2..3,
7007 " println!(\"hello world\");\n",
7008 " println!(\"goodbye world\");\n",
7009 DiffHunkStatus::modified_none(),
7010 ),
7011 ],
7012 );
7013 });
7014
7015 let staged_contents = r#"
7016 // print goodbye
7017 fn main() {
7018 }
7019 "#
7020 .unindent();
7021
7022 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7023
7024 cx.run_until_parked();
7025 unstaged_diff.update(cx, |unstaged_diff, cx| {
7026 let snapshot = buffer.read(cx).snapshot();
7027 assert_hunks(
7028 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7029 &snapshot,
7030 &unstaged_diff.base_text().text(),
7031 &[(
7032 2..3,
7033 "",
7034 " println!(\"goodbye world\");\n",
7035 DiffHunkStatus::added_none(),
7036 )],
7037 );
7038 });
7039}
7040
// Verifies that an uncommitted diff (working copy vs. HEAD) tracks changes to
// HEAD and to the index, including for a file that is deleted in the working
// copy but still present in HEAD.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the file: HEAD, index ("staged"), and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not in the working copy,
    // i.e. it has an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should have picked up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Relative to HEAD: the comment line is added (and not yet staged — it has
    // a secondary hunk), while the println change is staged (no secondary).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is unstaged, so the hunk still has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Now that the deletion is staged, the secondary hunk is gone.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7220
7221#[gpui::test]
7222async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
7223 use DiffHunkSecondaryStatus::*;
7224 init_test(cx);
7225
7226 let committed_contents = r#"
7227 zero
7228 one
7229 two
7230 three
7231 four
7232 five
7233 "#
7234 .unindent();
7235 let file_contents = r#"
7236 one
7237 TWO
7238 three
7239 FOUR
7240 five
7241 "#
7242 .unindent();
7243
7244 let fs = FakeFs::new(cx.background_executor.clone());
7245 fs.insert_tree(
7246 "/dir",
7247 json!({
7248 ".git": {},
7249 "file.txt": file_contents.clone()
7250 }),
7251 )
7252 .await;
7253
7254 fs.set_head_and_index_for_repo(
7255 path!("/dir/.git").as_ref(),
7256 &[("file.txt", committed_contents.clone())],
7257 );
7258
7259 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7260
7261 let buffer = project
7262 .update(cx, |project, cx| {
7263 project.open_local_buffer("/dir/file.txt", cx)
7264 })
7265 .await
7266 .unwrap();
7267 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7268 let uncommitted_diff = project
7269 .update(cx, |project, cx| {
7270 project.open_uncommitted_diff(buffer.clone(), cx)
7271 })
7272 .await
7273 .unwrap();
7274 let mut diff_events = cx.events(&uncommitted_diff);
7275
7276 // The hunks are initially unstaged.
7277 uncommitted_diff.read_with(cx, |diff, cx| {
7278 assert_hunks(
7279 diff.hunks(&snapshot, cx),
7280 &snapshot,
7281 &diff.base_text_string().unwrap(),
7282 &[
7283 (
7284 0..0,
7285 "zero\n",
7286 "",
7287 DiffHunkStatus::deleted(HasSecondaryHunk),
7288 ),
7289 (
7290 1..2,
7291 "two\n",
7292 "TWO\n",
7293 DiffHunkStatus::modified(HasSecondaryHunk),
7294 ),
7295 (
7296 3..4,
7297 "four\n",
7298 "FOUR\n",
7299 DiffHunkStatus::modified(HasSecondaryHunk),
7300 ),
7301 ],
7302 );
7303 });
7304
7305 // Stage a hunk. It appears as optimistically staged.
7306 uncommitted_diff.update(cx, |diff, cx| {
7307 let range =
7308 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
7309 let hunks = diff
7310 .hunks_intersecting_range(range, &snapshot, cx)
7311 .collect::<Vec<_>>();
7312 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7313
7314 assert_hunks(
7315 diff.hunks(&snapshot, cx),
7316 &snapshot,
7317 &diff.base_text_string().unwrap(),
7318 &[
7319 (
7320 0..0,
7321 "zero\n",
7322 "",
7323 DiffHunkStatus::deleted(HasSecondaryHunk),
7324 ),
7325 (
7326 1..2,
7327 "two\n",
7328 "TWO\n",
7329 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7330 ),
7331 (
7332 3..4,
7333 "four\n",
7334 "FOUR\n",
7335 DiffHunkStatus::modified(HasSecondaryHunk),
7336 ),
7337 ],
7338 );
7339 });
7340
7341 // The diff emits a change event for the range of the staged hunk.
7342 assert!(matches!(
7343 diff_events.next().await.unwrap(),
7344 BufferDiffEvent::HunksStagedOrUnstaged(_)
7345 ));
7346 let event = diff_events.next().await.unwrap();
7347 if let BufferDiffEvent::DiffChanged {
7348 changed_range: Some(changed_range),
7349 } = event
7350 {
7351 let changed_range = changed_range.to_point(&snapshot);
7352 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
7353 } else {
7354 panic!("Unexpected event {event:?}");
7355 }
7356
7357 // When the write to the index completes, it appears as staged.
7358 cx.run_until_parked();
7359 uncommitted_diff.update(cx, |diff, cx| {
7360 assert_hunks(
7361 diff.hunks(&snapshot, cx),
7362 &snapshot,
7363 &diff.base_text_string().unwrap(),
7364 &[
7365 (
7366 0..0,
7367 "zero\n",
7368 "",
7369 DiffHunkStatus::deleted(HasSecondaryHunk),
7370 ),
7371 (
7372 1..2,
7373 "two\n",
7374 "TWO\n",
7375 DiffHunkStatus::modified(NoSecondaryHunk),
7376 ),
7377 (
7378 3..4,
7379 "four\n",
7380 "FOUR\n",
7381 DiffHunkStatus::modified(HasSecondaryHunk),
7382 ),
7383 ],
7384 );
7385 });
7386
7387 // The diff emits a change event for the changed index text.
7388 let event = diff_events.next().await.unwrap();
7389 if let BufferDiffEvent::DiffChanged {
7390 changed_range: Some(changed_range),
7391 } = event
7392 {
7393 let changed_range = changed_range.to_point(&snapshot);
7394 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
7395 } else {
7396 panic!("Unexpected event {event:?}");
7397 }
7398
7399 // Simulate a problem writing to the git index.
7400 fs.set_error_message_for_index_write(
7401 "/dir/.git".as_ref(),
7402 Some("failed to write git index".into()),
7403 );
7404
7405 // Stage another hunk.
7406 uncommitted_diff.update(cx, |diff, cx| {
7407 let range =
7408 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
7409 let hunks = diff
7410 .hunks_intersecting_range(range, &snapshot, cx)
7411 .collect::<Vec<_>>();
7412 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7413
7414 assert_hunks(
7415 diff.hunks(&snapshot, cx),
7416 &snapshot,
7417 &diff.base_text_string().unwrap(),
7418 &[
7419 (
7420 0..0,
7421 "zero\n",
7422 "",
7423 DiffHunkStatus::deleted(HasSecondaryHunk),
7424 ),
7425 (
7426 1..2,
7427 "two\n",
7428 "TWO\n",
7429 DiffHunkStatus::modified(NoSecondaryHunk),
7430 ),
7431 (
7432 3..4,
7433 "four\n",
7434 "FOUR\n",
7435 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7436 ),
7437 ],
7438 );
7439 });
7440 assert!(matches!(
7441 diff_events.next().await.unwrap(),
7442 BufferDiffEvent::HunksStagedOrUnstaged(_)
7443 ));
7444 let event = diff_events.next().await.unwrap();
7445 if let BufferDiffEvent::DiffChanged {
7446 changed_range: Some(changed_range),
7447 } = event
7448 {
7449 let changed_range = changed_range.to_point(&snapshot);
7450 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
7451 } else {
7452 panic!("Unexpected event {event:?}");
7453 }
7454
7455 // When the write fails, the hunk returns to being unstaged.
7456 cx.run_until_parked();
7457 uncommitted_diff.update(cx, |diff, cx| {
7458 assert_hunks(
7459 diff.hunks(&snapshot, cx),
7460 &snapshot,
7461 &diff.base_text_string().unwrap(),
7462 &[
7463 (
7464 0..0,
7465 "zero\n",
7466 "",
7467 DiffHunkStatus::deleted(HasSecondaryHunk),
7468 ),
7469 (
7470 1..2,
7471 "two\n",
7472 "TWO\n",
7473 DiffHunkStatus::modified(NoSecondaryHunk),
7474 ),
7475 (
7476 3..4,
7477 "four\n",
7478 "FOUR\n",
7479 DiffHunkStatus::modified(HasSecondaryHunk),
7480 ),
7481 ],
7482 );
7483 });
7484
7485 let event = diff_events.next().await.unwrap();
7486 if let BufferDiffEvent::DiffChanged {
7487 changed_range: Some(changed_range),
7488 } = event
7489 {
7490 let changed_range = changed_range.to_point(&snapshot);
7491 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
7492 } else {
7493 panic!("Unexpected event {event:?}");
7494 }
7495
7496 // Allow writing to the git index to succeed again.
7497 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
7498
7499 // Stage two hunks with separate operations.
7500 uncommitted_diff.update(cx, |diff, cx| {
7501 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7502 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7503 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7504 });
7505
7506 // Both staged hunks appear as pending.
7507 uncommitted_diff.update(cx, |diff, cx| {
7508 assert_hunks(
7509 diff.hunks(&snapshot, cx),
7510 &snapshot,
7511 &diff.base_text_string().unwrap(),
7512 &[
7513 (
7514 0..0,
7515 "zero\n",
7516 "",
7517 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7518 ),
7519 (
7520 1..2,
7521 "two\n",
7522 "TWO\n",
7523 DiffHunkStatus::modified(NoSecondaryHunk),
7524 ),
7525 (
7526 3..4,
7527 "four\n",
7528 "FOUR\n",
7529 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7530 ),
7531 ],
7532 );
7533 });
7534
7535 // Both staging operations take effect.
7536 cx.run_until_parked();
7537 uncommitted_diff.update(cx, |diff, cx| {
7538 assert_hunks(
7539 diff.hunks(&snapshot, cx),
7540 &snapshot,
7541 &diff.base_text_string().unwrap(),
7542 &[
7543 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7544 (
7545 1..2,
7546 "two\n",
7547 "TWO\n",
7548 DiffHunkStatus::modified(NoSecondaryHunk),
7549 ),
7550 (
7551 3..4,
7552 "four\n",
7553 "FOUR\n",
7554 DiffHunkStatus::modified(NoSecondaryHunk),
7555 ),
7556 ],
7557 );
7558 });
7559}
7560
// Verifies that staging operations remain consistent when the FS events
// confirming earlier index writes arrive late (events are paused and flushed
// manually), including staging a new hunk while a prior write's event is
// still pending.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: `zero` deleted, `two` and `four` modified — three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. Because the confirming FS event is withheld, the
    // hunk stays in the optimistic `SecondaryHunkRemovalPending` state.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7754
// Randomized test: repeatedly stages/unstages random hunks with random delays
// (optionally deprioritizing diff recalculation to provoke races with index
// writes), then checks the final secondary status of every hunk against a
// locally-tracked model.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; overridable via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every 5th line is modified in the buffer, yielding 6 hunks over 30 lines.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the expected model: each operation updates the
    // corresponding entry's secondary status to its optimistic pending state.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to interleave with background work.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending state must have resolved to the
    // corresponding final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7877
7878#[gpui::test]
7879async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7880 init_test(cx);
7881
7882 let committed_contents = r#"
7883 fn main() {
7884 println!("hello from HEAD");
7885 }
7886 "#
7887 .unindent();
7888 let file_contents = r#"
7889 fn main() {
7890 println!("hello from the working copy");
7891 }
7892 "#
7893 .unindent();
7894
7895 let fs = FakeFs::new(cx.background_executor.clone());
7896 fs.insert_tree(
7897 "/dir",
7898 json!({
7899 ".git": {},
7900 "src": {
7901 "main.rs": file_contents,
7902 }
7903 }),
7904 )
7905 .await;
7906
7907 fs.set_head_for_repo(
7908 Path::new("/dir/.git"),
7909 &[("src/main.rs", committed_contents.clone())],
7910 "deadbeef",
7911 );
7912 fs.set_index_for_repo(
7913 Path::new("/dir/.git"),
7914 &[("src/main.rs", committed_contents.clone())],
7915 );
7916
7917 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7918
7919 let buffer = project
7920 .update(cx, |project, cx| {
7921 project.open_local_buffer("/dir/src/main.rs", cx)
7922 })
7923 .await
7924 .unwrap();
7925 let uncommitted_diff = project
7926 .update(cx, |project, cx| {
7927 project.open_uncommitted_diff(buffer.clone(), cx)
7928 })
7929 .await
7930 .unwrap();
7931
7932 cx.run_until_parked();
7933 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7934 let snapshot = buffer.read(cx).snapshot();
7935 assert_hunks(
7936 uncommitted_diff.hunks(&snapshot, cx),
7937 &snapshot,
7938 &uncommitted_diff.base_text_string().unwrap(),
7939 &[(
7940 1..2,
7941 " println!(\"hello from HEAD\");\n",
7942 " println!(\"hello from the working copy\");\n",
7943 DiffHunkStatus {
7944 kind: DiffHunkStatusKind::Modified,
7945 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7946 },
7947 )],
7948 );
7949 });
7950}
7951
7952#[gpui::test]
7953async fn test_repository_and_path_for_project_path(
7954 background_executor: BackgroundExecutor,
7955 cx: &mut gpui::TestAppContext,
7956) {
7957 init_test(cx);
7958 let fs = FakeFs::new(background_executor);
7959 fs.insert_tree(
7960 path!("/root"),
7961 json!({
7962 "c.txt": "",
7963 "dir1": {
7964 ".git": {},
7965 "deps": {
7966 "dep1": {
7967 ".git": {},
7968 "src": {
7969 "a.txt": ""
7970 }
7971 }
7972 },
7973 "src": {
7974 "b.txt": ""
7975 }
7976 },
7977 }),
7978 )
7979 .await;
7980
7981 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7982 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7983 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7984 project
7985 .update(cx, |project, cx| project.git_scans_complete(cx))
7986 .await;
7987 cx.run_until_parked();
7988
7989 project.read_with(cx, |project, cx| {
7990 let git_store = project.git_store().read(cx);
7991 let pairs = [
7992 ("c.txt", None),
7993 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7994 (
7995 "dir1/deps/dep1/src/a.txt",
7996 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7997 ),
7998 ];
7999 let expected = pairs
8000 .iter()
8001 .map(|(path, result)| {
8002 (
8003 path,
8004 result.map(|(repo, repo_path)| {
8005 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
8006 }),
8007 )
8008 })
8009 .collect::<Vec<_>>();
8010 let actual = pairs
8011 .iter()
8012 .map(|(path, _)| {
8013 let project_path = (tree_id, rel_path(path)).into();
8014 let result = maybe!({
8015 let (repo, repo_path) =
8016 git_store.repository_and_path_for_project_path(&project_path, cx)?;
8017 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
8018 });
8019 (path, result)
8020 })
8021 .collect::<Vec<_>>();
8022 pretty_assertions::assert_eq!(expected, actual);
8023 });
8024
8025 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
8026 .await
8027 .unwrap();
8028 cx.run_until_parked();
8029
8030 project.read_with(cx, |project, cx| {
8031 let git_store = project.git_store().read(cx);
8032 assert_eq!(
8033 git_store.repository_and_path_for_project_path(
8034 &(tree_id, rel_path("dir1/src/b.txt")).into(),
8035 cx
8036 ),
8037 None
8038 );
8039 });
8040}
8041
// Verifies that a `.git` directory located at the user's home directory is
// ignored when the opened worktree is a subdirectory of home, but is detected
// as a repository when the home directory itself is the worktree root.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Case 1: open only `$HOME/project`. The repo at `$HOME/.git` is an
    // ancestor of the worktree root, so no repository should be associated
    // with files inside the worktree.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Case 2: open `$HOME` itself. Now the repository should be picked up,
    // with the home directory as its working directory.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8099
// End-to-end check (against a real git repository on disk) that the cached
// status entries track modifications, additions, deletions, and subsequent
// commits as the working copy changes.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify an unchanged tracked file and confirm it shows up as modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modified/deleted files so their statuses clear.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked and one untracked file from the working copy.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8229
// Checks two pieces of status postprocessing: a file deleted in the index but
// present in HEAD and the working copy gets a combined "DA" status, and a
// nested git repository is excluded from the outer repository's statuses.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both `project` and `project/sub` are repositories here; pick the outer one.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8292
// Opens a worktree rooted in a subfolder of a repository and verifies that the
// repository is still discovered (with the correct work directory above the
// worktree root) and that statuses for paths inside the worktree are visible
// and update when the underlying repo state changes.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, not the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open the innermost subfolder as the project's worktree.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clear the simulated status and confirm the repository picks up the change.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8372
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is always false, so this test is currently compiled out.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick one
    // onto the other to produce a conflicted cherry-pick state.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolving, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8455
// Verifies that rewriting .gitignore swaps which files are considered ignored,
// and that a newly-unignored file staged in the index gets an Added status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignored states have swapped, and b.txt now shows as staged/added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8523
8524// NOTE:
8525// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8526// a directory which some program has already open.
8527// This is a limitation of the Windows.
8528// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// Renames a repository's working directory on disk and verifies that the
// repository entity's work_directory_abs_path and per-path statuses follow
// the rename. Ignored on Windows (see the NOTE above).
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole working directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The same repository entity now reports the new location with unchanged statuses.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8604
8605// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8606// you can't rename a directory which some program has already open. This is a
8607// limitation of the Windows. See:
8608// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// Broad end-to-end test of per-file git status tracking against a real
// repository: untracked files, modifications, commits, resets, stashes,
// gitignore rules, and renamed directories. Ignored on Windows (see the NOTE
// above).
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the gitignore (f.txt becomes ignored).
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new untracked file inside a nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the file
    // to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8832
// Verifies that adding an invisible (non-visible) worktree does not cause
// additional repositories to be reported by the project.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    // Only `dep1` is opened, so only its repository should be reported.
    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add a single-file worktree marked invisible (`false`); it lives inside
    // `dir1`, which has its own repository.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The repository list is unchanged: `dir1`'s repo is not picked up.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8894
// Exercises rescanning with gitignore rules from both an ancestor .gitignore
// (outside the worktree) and the repo's own .gitignore, checking the
// (status, ignored) state of tracked, ancestor-ignored, and ignored files
// before and after new files are created.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file_scan_exclusions so ignored directories are still scanned.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it in the index.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    // Also create files matched by the ancestor and repo gitignores.
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9035
// Checks discovery and refreshing of linked git worktrees (`.git` file with a
// `gitdir:` pointer into `.git/worktrees/...`) and submodules (`gitdir:`
// pointer into `.git/modules/...`): all three repositories are found, and
// git-state changes in the linked worktree and submodule are observed.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three work directories should be discovered.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository; barrier()
    // lets us wait for pending git work on that repo to finish.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // On-disk content "B" differs from HEAD/index content "b" → modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9191
// Two worktrees inside the same repository must produce a single repository
// entry, not one per worktree.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repository as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Only one repository, rooted at the shared parent, is reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9238
9239async fn search(
9240 project: &Entity<Project>,
9241 query: SearchQuery,
9242 cx: &mut gpui::TestAppContext,
9243) -> Result<HashMap<String, Vec<Range<usize>>>> {
9244 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9245 let mut results = HashMap::default();
9246 while let Ok(search_result) = search_rx.recv().await {
9247 match search_result {
9248 SearchResult::Buffer { buffer, ranges } => {
9249 results.entry(buffer).or_insert(ranges);
9250 }
9251 SearchResult::LimitReached => {}
9252 }
9253 }
9254 Ok(results
9255 .into_iter()
9256 .map(|(buffer, ranges)| {
9257 buffer.update(cx, |buffer, cx| {
9258 let path = buffer
9259 .file()
9260 .unwrap()
9261 .full_path(cx)
9262 .to_string_lossy()
9263 .to_string();
9264 let ranges = ranges
9265 .into_iter()
9266 .map(|range| range.to_offset(buffer))
9267 .collect::<Vec<_>>();
9268 (path, ranges)
9269 })
9270 })
9271 .collect())
9272}
9273
/// Shared setup for every test in this file: initializes test logging and the
/// global state (settings, release channel, languages) that `Project` needs.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store is installed as a global first; the init calls
        // below presumably register settings against it — keep this order.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9285
9286fn json_lang() -> Arc<Language> {
9287 Arc::new(Language::new(
9288 LanguageConfig {
9289 name: "JSON".into(),
9290 matcher: LanguageMatcher {
9291 path_suffixes: vec!["json".to_string()],
9292 ..Default::default()
9293 },
9294 ..Default::default()
9295 },
9296 None,
9297 ))
9298}
9299
9300fn js_lang() -> Arc<Language> {
9301 Arc::new(Language::new(
9302 LanguageConfig {
9303 name: "JavaScript".into(),
9304 matcher: LanguageMatcher {
9305 path_suffixes: vec!["js".to_string()],
9306 ..Default::default()
9307 },
9308 ..Default::default()
9309 },
9310 None,
9311 ))
9312}
9313
9314fn rust_lang() -> Arc<Language> {
9315 Arc::new(Language::new(
9316 LanguageConfig {
9317 name: "Rust".into(),
9318 matcher: LanguageMatcher {
9319 path_suffixes: vec!["rs".to_string()],
9320 ..Default::default()
9321 },
9322 ..Default::default()
9323 },
9324 Some(tree_sitter_rust::LANGUAGE.into()),
9325 ))
9326}
9327
/// Builds a Python language fixture (no grammar) whose toolchain lister
/// reports a fake virtual environment for every ancestor directory of the
/// queried path that contains a `.venv` directory on the fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test double: inspects the `FakeFs` instead of performing any real
    // Python interpreter discovery.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // One toolchain per ancestor of `subroot_relative_path` whose
            // directory contains `.venv`. Collected up front so no iterator
            // borrow is held across the `is_dir(...)` await below.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is deliberately unsupported in this test double.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // The fake toolchains need no shell activation commands.
        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9395
9396fn typescript_lang() -> Arc<Language> {
9397 Arc::new(Language::new(
9398 LanguageConfig {
9399 name: "TypeScript".into(),
9400 matcher: LanguageMatcher {
9401 path_suffixes: vec!["ts".to_string()],
9402 ..Default::default()
9403 },
9404 ..Default::default()
9405 },
9406 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9407 ))
9408}
9409
9410fn tsx_lang() -> Arc<Language> {
9411 Arc::new(Language::new(
9412 LanguageConfig {
9413 name: "tsx".into(),
9414 matcher: LanguageMatcher {
9415 path_suffixes: vec!["tsx".to_string()],
9416 ..Default::default()
9417 },
9418 ..Default::default()
9419 },
9420 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9421 ))
9422}
9423
9424fn get_all_tasks(
9425 project: &Entity<Project>,
9426 task_contexts: Arc<TaskContexts>,
9427 cx: &mut App,
9428) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9429 let new_tasks = project.update(cx, |project, cx| {
9430 project.task_store.update(cx, |task_store, cx| {
9431 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9432 this.used_and_current_resolved_tasks(task_contexts, cx)
9433 })
9434 })
9435 });
9436
9437 cx.background_spawn(async move {
9438 let (mut old, new) = new_tasks.await;
9439 old.extend(new);
9440 old
9441 })
9442}
9443
9444#[track_caller]
9445fn assert_entry_git_state(
9446 tree: &Worktree,
9447 repository: &Repository,
9448 path: &str,
9449 index_status: Option<StatusCode>,
9450 is_ignored: bool,
9451) {
9452 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9453 let entry = tree
9454 .entry_for_path(&rel_path(path))
9455 .unwrap_or_else(|| panic!("entry {path} not found"));
9456 let status = repository
9457 .status_for_path(&repo_path(path))
9458 .map(|entry| entry.status);
9459 let expected = index_status.map(|index_status| {
9460 TrackedStatus {
9461 index_status,
9462 worktree_status: StatusCode::Unmodified,
9463 }
9464 .into()
9465 });
9466 assert_eq!(
9467 status, expected,
9468 "expected {path} to have git status: {expected:?}"
9469 );
9470 assert_eq!(
9471 entry.is_ignored, is_ignored,
9472 "expected {path} to have is_ignored: {is_ignored}"
9473 );
9474}
9475
9476#[track_caller]
9477fn git_init(path: &Path) -> git2::Repository {
9478 let mut init_opts = RepositoryInitOptions::new();
9479 init_opts.initial_head("main");
9480 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9481}
9482
9483#[track_caller]
9484fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9485 let path = path.as_ref();
9486 let mut index = repo.index().expect("Failed to get index");
9487 index.add_path(path).expect("Failed to add file");
9488 index.write().expect("Failed to write index");
9489}
9490
9491#[track_caller]
9492fn git_remove_index(path: &Path, repo: &git2::Repository) {
9493 let mut index = repo.index().expect("Failed to get index");
9494 index.remove_path(path).expect("Failed to add file");
9495 index.write().expect("Failed to write index");
9496}
9497
9498#[track_caller]
9499fn git_commit(msg: &'static str, repo: &git2::Repository) {
9500 use git2::Signature;
9501
9502 let signature = Signature::now("test", "test@zed.dev").unwrap();
9503 let oid = repo.index().unwrap().write_tree().unwrap();
9504 let tree = repo.find_tree(oid).unwrap();
9505 if let Ok(head) = repo.head() {
9506 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9507
9508 let parent_commit = parent_obj.as_commit().unwrap();
9509
9510 repo.commit(
9511 Some("HEAD"),
9512 &signature,
9513 &signature,
9514 msg,
9515 &tree,
9516 &[parent_commit],
9517 )
9518 .expect("Failed to commit with parent");
9519 } else {
9520 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9521 .expect("Failed to commit");
9522 }
9523}
9524
// Currently unused helper; `#[cfg(any())]` compiles it out entirely.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9530
9531#[track_caller]
9532fn git_stash(repo: &mut git2::Repository) {
9533 use git2::Signature;
9534
9535 let signature = Signature::now("test", "test@zed.dev").unwrap();
9536 repo.stash_save(&signature, "N/A", None)
9537 .expect("Failed to stash");
9538}
9539
9540#[track_caller]
9541fn git_reset(offset: usize, repo: &git2::Repository) {
9542 let head = repo.head().expect("Couldn't get repo head");
9543 let object = head.peel(git2::ObjectType::Commit).unwrap();
9544 let commit = object.as_commit().unwrap();
9545 let new_head = commit
9546 .parents()
9547 .inspect(|parnet| {
9548 parnet.message();
9549 })
9550 .nth(offset)
9551 .expect("Not enough history");
9552 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9553 .expect("Could not reset");
9554}
9555
// Currently unused helper; `#[cfg(any())]` compiles it out entirely.
/// Creates branch `name` pointing at the current HEAD commit.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed copy-pasted panic message: this creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9566
// Currently unused helper; `#[cfg(any())]` compiles it out entirely.
// Points HEAD at the given ref name and checks out its tree.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9573
// Currently unused helper; `#[cfg(any())]` compiles it out entirely.
/// Snapshots `repo`'s status as a map from path to libgit2 status flags.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
9583
9584#[gpui::test]
9585async fn test_find_project_path_abs(
9586 background_executor: BackgroundExecutor,
9587 cx: &mut gpui::TestAppContext,
9588) {
9589 // find_project_path should work with absolute paths
9590 init_test(cx);
9591
9592 let fs = FakeFs::new(background_executor);
9593 fs.insert_tree(
9594 path!("/root"),
9595 json!({
9596 "project1": {
9597 "file1.txt": "content1",
9598 "subdir": {
9599 "file2.txt": "content2"
9600 }
9601 },
9602 "project2": {
9603 "file3.txt": "content3"
9604 }
9605 }),
9606 )
9607 .await;
9608
9609 let project = Project::test(
9610 fs.clone(),
9611 [
9612 path!("/root/project1").as_ref(),
9613 path!("/root/project2").as_ref(),
9614 ],
9615 cx,
9616 )
9617 .await;
9618
9619 // Make sure the worktrees are fully initialized
9620 project
9621 .update(cx, |project, cx| project.git_scans_complete(cx))
9622 .await;
9623 cx.run_until_parked();
9624
9625 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
9626 project.read_with(cx, |project, cx| {
9627 let worktrees: Vec<_> = project.worktrees(cx).collect();
9628 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
9629 let id1 = worktrees[0].read(cx).id();
9630 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
9631 let id2 = worktrees[1].read(cx).id();
9632 (abs_path1, id1, abs_path2, id2)
9633 });
9634
9635 project.update(cx, |project, cx| {
9636 let abs_path = project1_abs_path.join("file1.txt");
9637 let found_path = project.find_project_path(abs_path, cx).unwrap();
9638 assert_eq!(found_path.worktree_id, project1_id);
9639 assert_eq!(&*found_path.path, rel_path("file1.txt"));
9640
9641 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
9642 let found_path = project.find_project_path(abs_path, cx).unwrap();
9643 assert_eq!(found_path.worktree_id, project1_id);
9644 assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
9645
9646 let abs_path = project2_abs_path.join("file3.txt");
9647 let found_path = project.find_project_path(abs_path, cx).unwrap();
9648 assert_eq!(found_path.worktree_id, project2_id);
9649 assert_eq!(&*found_path.path, rel_path("file3.txt"));
9650
9651 let abs_path = project1_abs_path.join("nonexistent.txt");
9652 let found_path = project.find_project_path(abs_path, cx);
9653 assert!(
9654 found_path.is_some(),
9655 "Should find project path for nonexistent file in worktree"
9656 );
9657
9658 // Test with an absolute path outside any worktree
9659 let abs_path = Path::new("/some/other/path");
9660 let found_path = project.find_project_path(abs_path, cx);
9661 assert!(
9662 found_path.is_none(),
9663 "Should not find project path for path outside any worktree"
9664 );
9665 });
9666}