1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use async_trait::async_trait;
8use buffer_diff::{
9 BufferDiffEvent, DiffHunkSecondaryStatus, DiffHunkStatus, DiffHunkStatusKind, assert_hunks,
10};
11use fs::FakeFs;
12use futures::{StreamExt, future};
13use git::{
14 GitHostingProviderRegistry,
15 repository::RepoPath,
16 status::{StatusCode, TrackedStatus},
17};
18use git2::RepositoryInitOptions;
19use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
20use itertools::Itertools;
21use language::{
22 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
23 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
24 ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList, ToolchainLister,
25 language_settings::{AllLanguageSettings, LanguageSettingsContent, language_settings},
26 tree_sitter_rust, tree_sitter_typescript,
27};
28use lsp::{
29 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
30 Uri, WillRenameFiles, notification::DidRenameFiles,
31};
32use parking_lot::Mutex;
33use paths::{config_dir, tasks_file};
34use postage::stream::Stream as _;
35use pretty_assertions::{assert_eq, assert_matches};
36use rand::{Rng as _, rngs::StdRng};
37use serde_json::json;
38#[cfg(not(windows))]
39use std::os;
40use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
41use task::{ResolvedTask, ShellKind, TaskContext};
42use unindent::Unindent as _;
43use util::{
44 TryFutureExt as _, assert_set_eq, maybe, path,
45 paths::PathMatcher,
46 test::{TempTree, marked_text_offsets},
47 uri,
48};
49use worktree::WorktreeModelHandle as _;
50
51#[gpui::test]
52async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
53 cx.executor().allow_parking();
54
55 let (tx, mut rx) = futures::channel::mpsc::unbounded();
56 let _thread = std::thread::spawn(move || {
57 #[cfg(not(target_os = "windows"))]
58 std::fs::metadata("/tmp").unwrap();
59 #[cfg(target_os = "windows")]
60 std::fs::metadata("C:/Windows").unwrap();
61 std::thread::sleep(Duration::from_millis(1000));
62 tx.unbounded_send(1).unwrap();
63 });
64 rx.next().await.unwrap();
65}
66
67#[gpui::test]
68async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
69 cx.executor().allow_parking();
70
71 let io_task = smol::unblock(move || {
72 println!("sleeping on thread {:?}", std::thread::current().id());
73 std::thread::sleep(Duration::from_millis(10));
74 1
75 });
76
77 let task = cx.foreground_executor().spawn(async move {
78 io_task.await;
79 });
80
81 task.await;
82}
83
84#[cfg(not(windows))]
85#[gpui::test]
86async fn test_symlinks(cx: &mut gpui::TestAppContext) {
87 init_test(cx);
88 cx.executor().allow_parking();
89
90 let dir = TempTree::new(json!({
91 "root": {
92 "apple": "",
93 "banana": {
94 "carrot": {
95 "date": "",
96 "endive": "",
97 }
98 },
99 "fennel": {
100 "grape": "",
101 }
102 }
103 }));
104
105 let root_link_path = dir.path().join("root_link");
106 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
107 os::unix::fs::symlink(
108 dir.path().join("root/fennel"),
109 dir.path().join("root/finnochio"),
110 )
111 .unwrap();
112
113 let project = Project::test(
114 Arc::new(RealFs::new(None, cx.executor())),
115 [root_link_path.as_ref()],
116 cx,
117 )
118 .await;
119
120 project.update(cx, |project, cx| {
121 let tree = project.worktrees(cx).next().unwrap().read(cx);
122 assert_eq!(tree.file_count(), 5);
123 assert_eq!(
124 tree.inode_for_path("fennel/grape"),
125 tree.inode_for_path("finnochio/grape")
126 );
127 });
128}
129
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Checks .editorconfig handling: its values override .zed/settings.json,
    // a nested .editorconfig overrides its parent, "off" values fall back to
    // the Zed settings, and files not matched by any glob are unaffected.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project can observe it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(path).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .language_for_file_path(file.path.as_ref());
            let file_language = cx
                .foreground_executor()
                .block_on(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
228
229#[gpui::test]
230async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
231 init_test(cx);
232 cx.update(|cx| {
233 GitHostingProviderRegistry::default_global(cx);
234 git_hosting_providers::init(cx);
235 });
236
237 let fs = FakeFs::new(cx.executor());
238 let str_path = path!("/dir");
239 let path = Path::new(str_path);
240
241 fs.insert_tree(
242 path!("/dir"),
243 json!({
244 ".zed": {
245 "settings.json": r#"{
246 "git_hosting_providers": [
247 {
248 "provider": "gitlab",
249 "base_url": "https://google.com",
250 "name": "foo"
251 }
252 ]
253 }"#
254 },
255 }),
256 )
257 .await;
258
259 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
260 let (_worktree, _) =
261 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
262 cx.executor().run_until_parked();
263
264 cx.update(|cx| {
265 let provider = GitHostingProviderRegistry::global(cx);
266 assert!(
267 provider
268 .list_hosting_providers()
269 .into_iter()
270 .any(|provider| provider.name() == "foo")
271 );
272 });
273
274 fs.atomic_write(
275 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
276 "{}".into(),
277 )
278 .await
279 .unwrap();
280
281 cx.run_until_parked();
282
283 cx.update(|cx| {
284 let provider = GitHostingProviderRegistry::global(cx);
285 assert!(
286 !provider
287 .list_hosting_providers()
288 .into_iter()
289 .any(|provider| provider.name() == "foo")
290 );
291 });
292}
293
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Checks that per-directory `.zed` folders supply both settings (tab_size)
    // and tasks, that nested task definitions surface alongside worktree-root
    // ones, and that scheduling a task plus adding a global tasks.json entry
    // changes the returned task ordering.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against a context that names the active worktree but
    // carries no extra task variables.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // The task source for the `.zed` directory at the worktree root.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: PathBuf::from(".zed"),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Each directory's `.zed/settings.json` applies to the files
            // beneath it.
            let file_a = File::for_entry(
                tree.entry_for_path("a/a.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path("b/b.rs").unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both the nested (`b/.zed`) and root (`.zed`) tasks are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the worktree-root task as recently scheduled, and add a task via
    // the global tasks.json on top.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled task now sorts first; the global task comes
    // last, complete with its args and env.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: PathBuf::from(path!("b/.zed")),
                    id_base: if cfg!(windows) {
                        "local worktree tasks from directory \"b\\\\.zed\"".into()
                    } else {
                        "local worktree tasks from directory \"b/.zed\"".into()
                    },
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
502
503#[gpui::test]
504async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
505 init_test(cx);
506 TaskStore::init(None);
507
508 let fs = FakeFs::new(cx.executor());
509 fs.insert_tree(
510 path!("/dir"),
511 json!({
512 ".zed": {
513 "tasks.json": r#"[{
514 "label": "test worktree root",
515 "command": "echo $ZED_WORKTREE_ROOT"
516 }]"#,
517 },
518 "a": {
519 "a.rs": "fn a() {\n A\n}"
520 },
521 }),
522 )
523 .await;
524
525 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
526 let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
527
528 cx.executor().run_until_parked();
529 let worktree_id = cx.update(|cx| {
530 project.update(cx, |project, cx| {
531 project.worktrees(cx).next().unwrap().read(cx).id()
532 })
533 });
534
535 let active_non_worktree_item_tasks = cx
536 .update(|cx| {
537 get_all_tasks(
538 &project,
539 Arc::new(TaskContexts {
540 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
541 active_worktree_context: None,
542 other_worktree_contexts: Vec::new(),
543 lsp_task_sources: HashMap::default(),
544 latest_selection: None,
545 }),
546 cx,
547 )
548 })
549 .await;
550 assert!(
551 active_non_worktree_item_tasks.is_empty(),
552 "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
553 );
554
555 let active_worktree_tasks = cx
556 .update(|cx| {
557 get_all_tasks(
558 &project,
559 Arc::new(TaskContexts {
560 active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
561 active_worktree_context: Some((worktree_id, {
562 let mut worktree_context = TaskContext::default();
563 worktree_context
564 .task_variables
565 .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
566 worktree_context
567 })),
568 other_worktree_contexts: Vec::new(),
569 lsp_task_sources: HashMap::default(),
570 latest_selection: None,
571 }),
572 cx,
573 )
574 })
575 .await;
576 assert_eq!(
577 active_worktree_tasks
578 .into_iter()
579 .map(|(source_kind, task)| {
580 let resolved = task.resolved;
581 (source_kind, resolved.command.unwrap())
582 })
583 .collect::<Vec<_>>(),
584 vec![(
585 TaskSourceKind::Worktree {
586 id: worktree_id,
587 directory_in_worktree: PathBuf::from(path!(".zed")),
588 id_base: if cfg!(windows) {
589 "local worktree tasks from directory \".zed\"".into()
590 } else {
591 "local worktree tasks from directory \".zed\"".into()
592 },
593 },
594 "echo /dir".to_string(),
595 )]
596 );
597}
598
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Two Python subprojects (each rooted by its own pyproject.toml) initially
    // share a single language server instance; activating a different
    // toolchain for one of them causes a second, dedicated server to start.

    // Minimal manifest provider that roots projects at the nearest
    // pyproject.toml.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up to `depth` ancestors of `path`, returning the first
        // directory that contains a pyproject.toml file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<Path>> {
            for path in path.ancestors().take(depth) {
                let p = path.join("pyproject.toml");
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    // Register a fake "ty" server so we can observe which instances spawn.
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a file in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in the second subproject reuses the same server.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b's pyproject.toml.
    assert_eq!(root_path.as_ref(), Path::new("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: Arc::from("project-b/source_file.py".as_ref()),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
800
801#[gpui::test]
802async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
803 init_test(cx);
804
805 let fs = FakeFs::new(cx.executor());
806 fs.insert_tree(
807 path!("/dir"),
808 json!({
809 "test.rs": "const A: i32 = 1;",
810 "test2.rs": "",
811 "Cargo.toml": "a = 1",
812 "package.json": "{\"a\": 1}",
813 }),
814 )
815 .await;
816
817 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
818 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
819
820 let mut fake_rust_servers = language_registry.register_fake_lsp(
821 "Rust",
822 FakeLspAdapter {
823 name: "the-rust-language-server",
824 capabilities: lsp::ServerCapabilities {
825 completion_provider: Some(lsp::CompletionOptions {
826 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
827 ..Default::default()
828 }),
829 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
830 lsp::TextDocumentSyncOptions {
831 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
832 ..Default::default()
833 },
834 )),
835 ..Default::default()
836 },
837 ..Default::default()
838 },
839 );
840 let mut fake_json_servers = language_registry.register_fake_lsp(
841 "JSON",
842 FakeLspAdapter {
843 name: "the-json-language-server",
844 capabilities: lsp::ServerCapabilities {
845 completion_provider: Some(lsp::CompletionOptions {
846 trigger_characters: Some(vec![":".to_string()]),
847 ..Default::default()
848 }),
849 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
850 lsp::TextDocumentSyncOptions {
851 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
852 ..Default::default()
853 },
854 )),
855 ..Default::default()
856 },
857 ..Default::default()
858 },
859 );
860
861 // Open a buffer without an associated language server.
862 let (toml_buffer, _handle) = project
863 .update(cx, |project, cx| {
864 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
865 })
866 .await
867 .unwrap();
868
869 // Open a buffer with an associated language server before the language for it has been loaded.
870 let (rust_buffer, _handle2) = project
871 .update(cx, |project, cx| {
872 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
873 })
874 .await
875 .unwrap();
876 rust_buffer.update(cx, |buffer, _| {
877 assert_eq!(buffer.language().map(|l| l.name()), None);
878 });
879
880 // Now we add the languages to the project, and ensure they get assigned to all
881 // the relevant open buffers.
882 language_registry.add(json_lang());
883 language_registry.add(rust_lang());
884 cx.executor().run_until_parked();
885 rust_buffer.update(cx, |buffer, _| {
886 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
887 });
888
889 // A server is started up, and it is notified about Rust files.
890 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
891 assert_eq!(
892 fake_rust_server
893 .receive_notification::<lsp::notification::DidOpenTextDocument>()
894 .await
895 .text_document,
896 lsp::TextDocumentItem {
897 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
898 version: 0,
899 text: "const A: i32 = 1;".to_string(),
900 language_id: "rust".to_string(),
901 }
902 );
903
904 // The buffer is configured based on the language server's capabilities.
905 rust_buffer.update(cx, |buffer, _| {
906 assert_eq!(
907 buffer
908 .completion_triggers()
909 .iter()
910 .cloned()
911 .collect::<Vec<_>>(),
912 &[".".to_string(), "::".to_string()]
913 );
914 });
915 toml_buffer.update(cx, |buffer, _| {
916 assert!(buffer.completion_triggers().is_empty());
917 });
918
919 // Edit a buffer. The changes are reported to the language server.
920 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
921 assert_eq!(
922 fake_rust_server
923 .receive_notification::<lsp::notification::DidChangeTextDocument>()
924 .await
925 .text_document,
926 lsp::VersionedTextDocumentIdentifier::new(
927 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
928 1
929 )
930 );
931
932 // Open a third buffer with a different associated language server.
933 let (json_buffer, _json_handle) = project
934 .update(cx, |project, cx| {
935 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
936 })
937 .await
938 .unwrap();
939
940 // A json language server is started up and is only notified about the json buffer.
941 let mut fake_json_server = fake_json_servers.next().await.unwrap();
942 assert_eq!(
943 fake_json_server
944 .receive_notification::<lsp::notification::DidOpenTextDocument>()
945 .await
946 .text_document,
947 lsp::TextDocumentItem {
948 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
949 version: 0,
950 text: "{\"a\": 1}".to_string(),
951 language_id: "json".to_string(),
952 }
953 );
954
955 // This buffer is configured based on the second language server's
956 // capabilities.
957 json_buffer.update(cx, |buffer, _| {
958 assert_eq!(
959 buffer
960 .completion_triggers()
961 .iter()
962 .cloned()
963 .collect::<Vec<_>>(),
964 &[":".to_string()]
965 );
966 });
967
968 // When opening another buffer whose language server is already running,
969 // it is also configured based on the existing language server's capabilities.
970 let (rust_buffer2, _handle4) = project
971 .update(cx, |project, cx| {
972 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
973 })
974 .await
975 .unwrap();
976 rust_buffer2.update(cx, |buffer, _| {
977 assert_eq!(
978 buffer
979 .completion_triggers()
980 .iter()
981 .cloned()
982 .collect::<Vec<_>>(),
983 &[".".to_string(), "::".to_string()]
984 );
985 });
986
987 // Changes are reported only to servers matching the buffer's language.
988 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
989 rust_buffer2.update(cx, |buffer, cx| {
990 buffer.edit([(0..0, "let x = 1;")], None, cx)
991 });
992 assert_eq!(
993 fake_rust_server
994 .receive_notification::<lsp::notification::DidChangeTextDocument>()
995 .await
996 .text_document,
997 lsp::VersionedTextDocumentIdentifier::new(
998 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
999 1
1000 )
1001 );
1002
1003 // Save notifications are reported to all servers.
1004 project
1005 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1006 .await
1007 .unwrap();
1008 assert_eq!(
1009 fake_rust_server
1010 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1011 .await
1012 .text_document,
1013 lsp::TextDocumentIdentifier::new(
1014 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1015 )
1016 );
1017 assert_eq!(
1018 fake_json_server
1019 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1020 .await
1021 .text_document,
1022 lsp::TextDocumentIdentifier::new(
1023 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1024 )
1025 );
1026
1027 // Renames are reported only to servers matching the buffer's language.
1028 fs.rename(
1029 Path::new(path!("/dir/test2.rs")),
1030 Path::new(path!("/dir/test3.rs")),
1031 Default::default(),
1032 )
1033 .await
1034 .unwrap();
1035 assert_eq!(
1036 fake_rust_server
1037 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1038 .await
1039 .text_document,
1040 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1041 );
1042 assert_eq!(
1043 fake_rust_server
1044 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1045 .await
1046 .text_document,
1047 lsp::TextDocumentItem {
1048 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1049 version: 0,
1050 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1051 language_id: "rust".to_string(),
1052 },
1053 );
1054
1055 rust_buffer2.update(cx, |buffer, cx| {
1056 buffer.update_diagnostics(
1057 LanguageServerId(0),
1058 DiagnosticSet::from_sorted_entries(
1059 vec![DiagnosticEntry {
1060 diagnostic: Default::default(),
1061 range: Anchor::MIN..Anchor::MAX,
1062 }],
1063 &buffer.snapshot(),
1064 ),
1065 cx,
1066 );
1067 assert_eq!(
1068 buffer
1069 .snapshot()
1070 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1071 .count(),
1072 1
1073 );
1074 });
1075
1076 // When the rename changes the extension of the file, the buffer gets closed on the old
1077 // language server and gets opened on the new one.
1078 fs.rename(
1079 Path::new(path!("/dir/test3.rs")),
1080 Path::new(path!("/dir/test3.json")),
1081 Default::default(),
1082 )
1083 .await
1084 .unwrap();
1085 assert_eq!(
1086 fake_rust_server
1087 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1088 .await
1089 .text_document,
1090 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1091 );
1092 assert_eq!(
1093 fake_json_server
1094 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1095 .await
1096 .text_document,
1097 lsp::TextDocumentItem {
1098 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1099 version: 0,
1100 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1101 language_id: "json".to_string(),
1102 },
1103 );
1104
1105 // We clear the diagnostics, since the language has changed.
1106 rust_buffer2.update(cx, |buffer, _| {
1107 assert_eq!(
1108 buffer
1109 .snapshot()
1110 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1111 .count(),
1112 0
1113 );
1114 });
1115
1116 // The renamed file's version resets after changing language server.
1117 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1118 assert_eq!(
1119 fake_json_server
1120 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1121 .await
1122 .text_document,
1123 lsp::VersionedTextDocumentIdentifier::new(
1124 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1125 1
1126 )
1127 );
1128
1129 // Restart language servers
1130 project.update(cx, |project, cx| {
1131 project.restart_language_servers_for_buffers(
1132 vec![rust_buffer.clone(), json_buffer.clone()],
1133 HashSet::default(),
1134 cx,
1135 );
1136 });
1137
1138 let mut rust_shutdown_requests = fake_rust_server
1139 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1140 let mut json_shutdown_requests = fake_json_server
1141 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1142 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1143
1144 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1145 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1146
1147 // Ensure rust document is reopened in new rust language server
1148 assert_eq!(
1149 fake_rust_server
1150 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1151 .await
1152 .text_document,
1153 lsp::TextDocumentItem {
1154 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1155 version: 0,
1156 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1157 language_id: "rust".to_string(),
1158 }
1159 );
1160
1161 // Ensure json documents are reopened in new json language server
1162 assert_set_eq!(
1163 [
1164 fake_json_server
1165 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1166 .await
1167 .text_document,
1168 fake_json_server
1169 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1170 .await
1171 .text_document,
1172 ],
1173 [
1174 lsp::TextDocumentItem {
1175 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1176 version: 0,
1177 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1178 language_id: "json".to_string(),
1179 },
1180 lsp::TextDocumentItem {
1181 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1182 version: 0,
1183 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1184 language_id: "json".to_string(),
1185 }
1186 ]
1187 );
1188
1189 // Close notifications are reported only to servers matching the buffer's language.
1190 cx.update(|_| drop(_json_handle));
1191 let close_message = lsp::DidCloseTextDocumentParams {
1192 text_document: lsp::TextDocumentIdentifier::new(
1193 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1194 ),
1195 };
1196 assert_eq!(
1197 fake_json_server
1198 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1199 .await,
1200 close_message,
1201 );
1202}
1203
1204#[gpui::test]
1205async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1206 init_test(cx);
1207
1208 let fs = FakeFs::new(cx.executor());
1209 fs.insert_tree(
1210 path!("/the-root"),
1211 json!({
1212 ".gitignore": "target\n",
1213 "Cargo.lock": "",
1214 "src": {
1215 "a.rs": "",
1216 "b.rs": "",
1217 },
1218 "target": {
1219 "x": {
1220 "out": {
1221 "x.rs": ""
1222 }
1223 },
1224 "y": {
1225 "out": {
1226 "y.rs": "",
1227 }
1228 },
1229 "z": {
1230 "out": {
1231 "z.rs": ""
1232 }
1233 }
1234 }
1235 }),
1236 )
1237 .await;
1238 fs.insert_tree(
1239 path!("/the-registry"),
1240 json!({
1241 "dep1": {
1242 "src": {
1243 "dep1.rs": "",
1244 }
1245 },
1246 "dep2": {
1247 "src": {
1248 "dep2.rs": "",
1249 }
1250 },
1251 }),
1252 )
1253 .await;
1254 fs.insert_tree(
1255 path!("/the/stdlib"),
1256 json!({
1257 "LICENSE": "",
1258 "src": {
1259 "string.rs": "",
1260 }
1261 }),
1262 )
1263 .await;
1264
1265 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1266 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1267 (project.languages().clone(), project.lsp_store())
1268 });
1269 language_registry.add(rust_lang());
1270 let mut fake_servers = language_registry.register_fake_lsp(
1271 "Rust",
1272 FakeLspAdapter {
1273 name: "the-language-server",
1274 ..Default::default()
1275 },
1276 );
1277
1278 cx.executor().run_until_parked();
1279
1280 // Start the language server by opening a buffer with a compatible file extension.
1281 project
1282 .update(cx, |project, cx| {
1283 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1284 })
1285 .await
1286 .unwrap();
1287
1288 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1289 project.update(cx, |project, cx| {
1290 let worktree = project.worktrees(cx).next().unwrap();
1291 assert_eq!(
1292 worktree
1293 .read(cx)
1294 .snapshot()
1295 .entries(true, 0)
1296 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1297 .collect::<Vec<_>>(),
1298 &[
1299 (Path::new(""), false),
1300 (Path::new(".gitignore"), false),
1301 (Path::new("Cargo.lock"), false),
1302 (Path::new("src"), false),
1303 (Path::new("src/a.rs"), false),
1304 (Path::new("src/b.rs"), false),
1305 (Path::new("target"), true),
1306 ]
1307 );
1308 });
1309
1310 let prev_read_dir_count = fs.read_dir_call_count();
1311
1312 let fake_server = fake_servers.next().await.unwrap();
1313 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1314 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1315 id
1316 });
1317
1318 // Simulate jumping to a definition in a dependency outside of the worktree.
1319 let _out_of_worktree_buffer = project
1320 .update(cx, |project, cx| {
1321 project.open_local_buffer_via_lsp(
1322 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1323 server_id,
1324 cx,
1325 )
1326 })
1327 .await
1328 .unwrap();
1329
1330 // Keep track of the FS events reported to the language server.
1331 let file_changes = Arc::new(Mutex::new(Vec::new()));
1332 fake_server
1333 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1334 registrations: vec![lsp::Registration {
1335 id: Default::default(),
1336 method: "workspace/didChangeWatchedFiles".to_string(),
1337 register_options: serde_json::to_value(
1338 lsp::DidChangeWatchedFilesRegistrationOptions {
1339 watchers: vec![
1340 lsp::FileSystemWatcher {
1341 glob_pattern: lsp::GlobPattern::String(
1342 path!("/the-root/Cargo.toml").to_string(),
1343 ),
1344 kind: None,
1345 },
1346 lsp::FileSystemWatcher {
1347 glob_pattern: lsp::GlobPattern::String(
1348 path!("/the-root/src/*.{rs,c}").to_string(),
1349 ),
1350 kind: None,
1351 },
1352 lsp::FileSystemWatcher {
1353 glob_pattern: lsp::GlobPattern::String(
1354 path!("/the-root/target/y/**/*.rs").to_string(),
1355 ),
1356 kind: None,
1357 },
1358 lsp::FileSystemWatcher {
1359 glob_pattern: lsp::GlobPattern::String(
1360 path!("/the/stdlib/src/**/*.rs").to_string(),
1361 ),
1362 kind: None,
1363 },
1364 lsp::FileSystemWatcher {
1365 glob_pattern: lsp::GlobPattern::String(
1366 path!("**/Cargo.lock").to_string(),
1367 ),
1368 kind: None,
1369 },
1370 ],
1371 },
1372 )
1373 .ok(),
1374 }],
1375 })
1376 .await
1377 .into_response()
1378 .unwrap();
1379 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1380 let file_changes = file_changes.clone();
1381 move |params, _| {
1382 let mut file_changes = file_changes.lock();
1383 file_changes.extend(params.changes);
1384 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1385 }
1386 });
1387
1388 cx.executor().run_until_parked();
1389 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1390 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1391
1392 let mut new_watched_paths = fs.watched_paths();
1393 new_watched_paths.retain(|path| !path.starts_with(config_dir()));
1394 assert_eq!(
1395 &new_watched_paths,
1396 &[
1397 Path::new(path!("/the-root")),
1398 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1399 Path::new(path!("/the/stdlib/src"))
1400 ]
1401 );
1402
1403 // Now the language server has asked us to watch an ignored directory path,
1404 // so we recursively load it.
1405 project.update(cx, |project, cx| {
1406 let worktree = project.visible_worktrees(cx).next().unwrap();
1407 assert_eq!(
1408 worktree
1409 .read(cx)
1410 .snapshot()
1411 .entries(true, 0)
1412 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
1413 .collect::<Vec<_>>(),
1414 &[
1415 (Path::new(""), false),
1416 (Path::new(".gitignore"), false),
1417 (Path::new("Cargo.lock"), false),
1418 (Path::new("src"), false),
1419 (Path::new("src/a.rs"), false),
1420 (Path::new("src/b.rs"), false),
1421 (Path::new("target"), true),
1422 (Path::new("target/x"), true),
1423 (Path::new("target/y"), true),
1424 (Path::new("target/y/out"), true),
1425 (Path::new("target/y/out/y.rs"), true),
1426 (Path::new("target/z"), true),
1427 ]
1428 );
1429 });
1430
1431 // Perform some file system mutations, two of which match the watched patterns,
1432 // and one of which does not.
1433 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1434 .await
1435 .unwrap();
1436 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1437 .await
1438 .unwrap();
1439 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1440 .await
1441 .unwrap();
1442 fs.create_file(
1443 path!("/the-root/target/x/out/x2.rs").as_ref(),
1444 Default::default(),
1445 )
1446 .await
1447 .unwrap();
1448 fs.create_file(
1449 path!("/the-root/target/y/out/y2.rs").as_ref(),
1450 Default::default(),
1451 )
1452 .await
1453 .unwrap();
1454 fs.save(
1455 path!("/the-root/Cargo.lock").as_ref(),
1456 &"".into(),
1457 Default::default(),
1458 )
1459 .await
1460 .unwrap();
1461 fs.save(
1462 path!("/the-stdlib/LICENSE").as_ref(),
1463 &"".into(),
1464 Default::default(),
1465 )
1466 .await
1467 .unwrap();
1468 fs.save(
1469 path!("/the/stdlib/src/string.rs").as_ref(),
1470 &"".into(),
1471 Default::default(),
1472 )
1473 .await
1474 .unwrap();
1475
1476 // The language server receives events for the FS mutations that match its watch patterns.
1477 cx.executor().run_until_parked();
1478 assert_eq!(
1479 &*file_changes.lock(),
1480 &[
1481 lsp::FileEvent {
1482 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1483 typ: lsp::FileChangeType::CHANGED,
1484 },
1485 lsp::FileEvent {
1486 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1487 typ: lsp::FileChangeType::DELETED,
1488 },
1489 lsp::FileEvent {
1490 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1491 typ: lsp::FileChangeType::CREATED,
1492 },
1493 lsp::FileEvent {
1494 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1495 typ: lsp::FileChangeType::CREATED,
1496 },
1497 lsp::FileEvent {
1498 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1499 typ: lsp::FileChangeType::CHANGED,
1500 },
1501 ]
1502 );
1503}
1504
1505#[gpui::test]
1506async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1507 init_test(cx);
1508
1509 let fs = FakeFs::new(cx.executor());
1510 fs.insert_tree(
1511 path!("/dir"),
1512 json!({
1513 "a.rs": "let a = 1;",
1514 "b.rs": "let b = 2;"
1515 }),
1516 )
1517 .await;
1518
1519 let project = Project::test(
1520 fs,
1521 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1522 cx,
1523 )
1524 .await;
1525 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1526
1527 let buffer_a = project
1528 .update(cx, |project, cx| {
1529 project.open_local_buffer(path!("/dir/a.rs"), cx)
1530 })
1531 .await
1532 .unwrap();
1533 let buffer_b = project
1534 .update(cx, |project, cx| {
1535 project.open_local_buffer(path!("/dir/b.rs"), cx)
1536 })
1537 .await
1538 .unwrap();
1539
1540 lsp_store.update(cx, |lsp_store, cx| {
1541 lsp_store
1542 .update_diagnostics(
1543 LanguageServerId(0),
1544 lsp::PublishDiagnosticsParams {
1545 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1546 version: None,
1547 diagnostics: vec![lsp::Diagnostic {
1548 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1549 severity: Some(lsp::DiagnosticSeverity::ERROR),
1550 message: "error 1".to_string(),
1551 ..Default::default()
1552 }],
1553 },
1554 None,
1555 DiagnosticSourceKind::Pushed,
1556 &[],
1557 cx,
1558 )
1559 .unwrap();
1560 lsp_store
1561 .update_diagnostics(
1562 LanguageServerId(0),
1563 lsp::PublishDiagnosticsParams {
1564 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1565 version: None,
1566 diagnostics: vec![lsp::Diagnostic {
1567 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1568 severity: Some(DiagnosticSeverity::WARNING),
1569 message: "error 2".to_string(),
1570 ..Default::default()
1571 }],
1572 },
1573 None,
1574 DiagnosticSourceKind::Pushed,
1575 &[],
1576 cx,
1577 )
1578 .unwrap();
1579 });
1580
1581 buffer_a.update(cx, |buffer, _| {
1582 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1583 assert_eq!(
1584 chunks
1585 .iter()
1586 .map(|(s, d)| (s.as_str(), *d))
1587 .collect::<Vec<_>>(),
1588 &[
1589 ("let ", None),
1590 ("a", Some(DiagnosticSeverity::ERROR)),
1591 (" = 1;", None),
1592 ]
1593 );
1594 });
1595 buffer_b.update(cx, |buffer, _| {
1596 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1597 assert_eq!(
1598 chunks
1599 .iter()
1600 .map(|(s, d)| (s.as_str(), *d))
1601 .collect::<Vec<_>>(),
1602 &[
1603 ("let ", None),
1604 ("b", Some(DiagnosticSeverity::WARNING)),
1605 (" = 2;", None),
1606 ]
1607 );
1608 });
1609}
1610
1611#[gpui::test]
1612async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1613 init_test(cx);
1614
1615 let fs = FakeFs::new(cx.executor());
1616 fs.insert_tree(
1617 path!("/root"),
1618 json!({
1619 "dir": {
1620 ".git": {
1621 "HEAD": "ref: refs/heads/main",
1622 },
1623 ".gitignore": "b.rs",
1624 "a.rs": "let a = 1;",
1625 "b.rs": "let b = 2;",
1626 },
1627 "other.rs": "let b = c;"
1628 }),
1629 )
1630 .await;
1631
1632 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1633 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1634 let (worktree, _) = project
1635 .update(cx, |project, cx| {
1636 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1637 })
1638 .await
1639 .unwrap();
1640 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1641
1642 let (worktree, _) = project
1643 .update(cx, |project, cx| {
1644 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1645 })
1646 .await
1647 .unwrap();
1648 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1649
1650 let server_id = LanguageServerId(0);
1651 lsp_store.update(cx, |lsp_store, cx| {
1652 lsp_store
1653 .update_diagnostics(
1654 server_id,
1655 lsp::PublishDiagnosticsParams {
1656 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1657 version: None,
1658 diagnostics: vec![lsp::Diagnostic {
1659 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1660 severity: Some(lsp::DiagnosticSeverity::ERROR),
1661 message: "unused variable 'b'".to_string(),
1662 ..Default::default()
1663 }],
1664 },
1665 None,
1666 DiagnosticSourceKind::Pushed,
1667 &[],
1668 cx,
1669 )
1670 .unwrap();
1671 lsp_store
1672 .update_diagnostics(
1673 server_id,
1674 lsp::PublishDiagnosticsParams {
1675 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1676 version: None,
1677 diagnostics: vec![lsp::Diagnostic {
1678 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1679 severity: Some(lsp::DiagnosticSeverity::ERROR),
1680 message: "unknown variable 'c'".to_string(),
1681 ..Default::default()
1682 }],
1683 },
1684 None,
1685 DiagnosticSourceKind::Pushed,
1686 &[],
1687 cx,
1688 )
1689 .unwrap();
1690 });
1691
1692 let main_ignored_buffer = project
1693 .update(cx, |project, cx| {
1694 project.open_buffer((main_worktree_id, "b.rs"), cx)
1695 })
1696 .await
1697 .unwrap();
1698 main_ignored_buffer.update(cx, |buffer, _| {
1699 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1700 assert_eq!(
1701 chunks
1702 .iter()
1703 .map(|(s, d)| (s.as_str(), *d))
1704 .collect::<Vec<_>>(),
1705 &[
1706 ("let ", None),
1707 ("b", Some(DiagnosticSeverity::ERROR)),
1708 (" = 2;", None),
1709 ],
1710 "Gigitnored buffers should still get in-buffer diagnostics",
1711 );
1712 });
1713 let other_buffer = project
1714 .update(cx, |project, cx| {
1715 project.open_buffer((other_worktree_id, ""), cx)
1716 })
1717 .await
1718 .unwrap();
1719 other_buffer.update(cx, |buffer, _| {
1720 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1721 assert_eq!(
1722 chunks
1723 .iter()
1724 .map(|(s, d)| (s.as_str(), *d))
1725 .collect::<Vec<_>>(),
1726 &[
1727 ("let b = ", None),
1728 ("c", Some(DiagnosticSeverity::ERROR)),
1729 (";", None),
1730 ],
1731 "Buffers from hidden projects should still get in-buffer diagnostics"
1732 );
1733 });
1734
1735 project.update(cx, |project, cx| {
1736 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1737 assert_eq!(
1738 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1739 vec![(
1740 ProjectPath {
1741 worktree_id: main_worktree_id,
1742 path: Arc::from(Path::new("b.rs")),
1743 },
1744 server_id,
1745 DiagnosticSummary {
1746 error_count: 1,
1747 warning_count: 0,
1748 }
1749 )]
1750 );
1751 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1752 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1753 });
1754}
1755
// Verifies the sequence of project events emitted while a language server
// runs a disk-based diagnostics pass: started → diagnostics updated →
// finished, using the adapter's configured progress token. The exact event
// order is asserted, so statement order here is significant.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter treats progress with `progress_token` as a disk-based
    // diagnostics pass.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress with the disk-based token emits a "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics mid-pass emits a DiagnosticsUpdated event for the
    // affected path.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // Ending the progress emits the matching "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, Path::new("a.rs")).into()],
        }
    );

    // A second identical empty publish produces no further event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1892
// Verifies that restarting a language server while its disk-based diagnostics
// pass is still in flight does not leave the project stuck in a "diagnostics
// running" state: the new server's pass supersedes the old one. The exact
// event order is asserted, so statement order here is significant.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed, and its replacement gets id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1993
1994#[gpui::test]
1995async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1996 init_test(cx);
1997
1998 let fs = FakeFs::new(cx.executor());
1999 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2000
2001 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2002
2003 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2004 language_registry.add(rust_lang());
2005 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2006
2007 let (buffer, _) = project
2008 .update(cx, |project, cx| {
2009 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2010 })
2011 .await
2012 .unwrap();
2013
2014 // Publish diagnostics
2015 let fake_server = fake_servers.next().await.unwrap();
2016 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2017 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2018 version: None,
2019 diagnostics: vec![lsp::Diagnostic {
2020 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2021 severity: Some(lsp::DiagnosticSeverity::ERROR),
2022 message: "the message".to_string(),
2023 ..Default::default()
2024 }],
2025 });
2026
2027 cx.executor().run_until_parked();
2028 buffer.update(cx, |buffer, _| {
2029 assert_eq!(
2030 buffer
2031 .snapshot()
2032 .diagnostics_in_range::<_, usize>(0..1, false)
2033 .map(|entry| entry.diagnostic.message)
2034 .collect::<Vec<_>>(),
2035 ["the message".to_string()]
2036 );
2037 });
2038 project.update(cx, |project, cx| {
2039 assert_eq!(
2040 project.diagnostic_summary(false, cx),
2041 DiagnosticSummary {
2042 error_count: 1,
2043 warning_count: 0,
2044 }
2045 );
2046 });
2047
2048 project.update(cx, |project, cx| {
2049 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2050 });
2051
2052 // The diagnostics are cleared.
2053 cx.executor().run_until_parked();
2054 buffer.update(cx, |buffer, _| {
2055 assert_eq!(
2056 buffer
2057 .snapshot()
2058 .diagnostics_in_range::<_, usize>(0..1, false)
2059 .map(|entry| entry.diagnostic.message)
2060 .collect::<Vec<_>>(),
2061 Vec::<String>::new(),
2062 );
2063 });
2064 project.update(cx, |project, cx| {
2065 assert_eq!(
2066 project.diagnostic_summary(false, cx),
2067 DiagnosticSummary {
2068 error_count: 0,
2069 warning_count: 0,
2070 }
2071 );
2072 });
2073}
2074
2075#[gpui::test]
2076async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2077 init_test(cx);
2078
2079 let fs = FakeFs::new(cx.executor());
2080 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2081
2082 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2083 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2084
2085 language_registry.add(rust_lang());
2086 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2087
2088 let (buffer, _handle) = project
2089 .update(cx, |project, cx| {
2090 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2091 })
2092 .await
2093 .unwrap();
2094
2095 // Before restarting the server, report diagnostics with an unknown buffer version.
2096 let fake_server = fake_servers.next().await.unwrap();
2097 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2098 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2099 version: Some(10000),
2100 diagnostics: Vec::new(),
2101 });
2102 cx.executor().run_until_parked();
2103 project.update(cx, |project, cx| {
2104 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2105 });
2106
2107 let mut fake_server = fake_servers.next().await.unwrap();
2108 let notification = fake_server
2109 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2110 .await
2111 .text_document;
2112 assert_eq!(notification.version, 0);
2113}
2114
2115#[gpui::test]
2116async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2117 init_test(cx);
2118
2119 let progress_token = "the-progress-token";
2120
2121 let fs = FakeFs::new(cx.executor());
2122 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2123
2124 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2125
2126 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2127 language_registry.add(rust_lang());
2128 let mut fake_servers = language_registry.register_fake_lsp(
2129 "Rust",
2130 FakeLspAdapter {
2131 name: "the-language-server",
2132 disk_based_diagnostics_sources: vec!["disk".into()],
2133 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2134 ..Default::default()
2135 },
2136 );
2137
2138 let (buffer, _handle) = project
2139 .update(cx, |project, cx| {
2140 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2141 })
2142 .await
2143 .unwrap();
2144
2145 // Simulate diagnostics starting to update.
2146 let mut fake_server = fake_servers.next().await.unwrap();
2147 fake_server
2148 .start_progress_with(
2149 "another-token",
2150 lsp::WorkDoneProgressBegin {
2151 cancellable: Some(false),
2152 ..Default::default()
2153 },
2154 )
2155 .await;
2156 fake_server
2157 .start_progress_with(
2158 progress_token,
2159 lsp::WorkDoneProgressBegin {
2160 cancellable: Some(true),
2161 ..Default::default()
2162 },
2163 )
2164 .await;
2165 cx.executor().run_until_parked();
2166
2167 project.update(cx, |project, cx| {
2168 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2169 });
2170
2171 let cancel_notification = fake_server
2172 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2173 .await;
2174 assert_eq!(
2175 cancel_notification.token,
2176 NumberOrString::String(progress_token.into())
2177 );
2178}
2179
// Verifies that toggling `enable_language_server` in the per-language settings
// starts and stops only the affected language's server, leaving servers for
// other languages untouched.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    // One fake server per language.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The Rust server is told to exit; the JS server keeps running.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings::<AllLanguageSettings>(cx, |settings| {
                settings.languages.0.insert(
                    LanguageName::new("Rust"),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages.0.insert(
                    LanguageName::new("JavaScript"),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server starts and re-opens the Rust buffer…
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // …while the JavaScript server shuts down.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2297
// Verifies that diagnostics published by a language server against an older
// document version are transformed into the buffer's current coordinate
// space, that overlapping diagnostics highlight correctly, and that
// publishes are mapped through edits made after the reported version.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Registering "disk" as a disk-based diagnostics source makes the
    // published entries below carry `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer,
    // using positions that predate the "\n\n" insertion above.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // Rows shift by the two newlines inserted at the top of the buffer.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range that starts/ends mid-diagnostic only yields the overlapping
        // portions of the highlighted chunks.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                // This warning's range contains the error's range above.
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe ERROR wins;
        // the remainder of the warning's range stays WARNING.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2589
// Verifies how zero-width diagnostic ranges are rendered: they must be
// widened to cover at least one character so the user can see them.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two = \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two empty-range diagnostics directly into the LSP store:
    // one mid-line (before ";") and one at the very end of a line.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2665
// Verifies that diagnostics for the same file reported by two distinct
// language servers are counted independently in the diagnostic summary
// rather than one replacing the other.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same path, same range — but reported by server 0...
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        // ...and by server 1.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // Both servers' diagnostics contribute to the summary.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2726
// Verifies that `edits_from_lsp` interprets LSP edits against the document
// version the server last saw, then maps them through edits the user made
// since that version, so both sets of changes survive.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version the server saw at open time; the LSP
    // edits below will be expressed relative to this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The positions in these LSP edits refer to the *original* document
    // version, before the user edits above were applied.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the resolved edits preserves both the user's comments and the
    // server's renames (f10, f200, f4000).
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2881
// Verifies that `edits_from_lsp` collapses a large delete-and-reinsert diff
// (as produced e.g. by rust-analyzer's merge-imports action) into the
// minimal set of edits that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The reinsert-then-delete pair should have been reduced to just the
        // import rewrite plus the removal of the now-duplicated `use` line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2992
// Verifies that `edits_from_lsp` tolerates a spec-violating edit pair: an
// insertion at the same position as (and listed after) a replacement. The
// insertion should still land before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3048
// Verifies that `edits_from_lsp` sanitizes malformed server edits: unordered
// edit lists, inverted ranges (end before start), and ranges that point past
// the end of the document.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start column 8, end column 4.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Despite the malformed input, the resolved edits are the same
        // minimal pair as in the well-formed adjacent-lines test above.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3155
3156fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3157 buffer: &Buffer,
3158 range: Range<T>,
3159) -> Vec<(String, Option<DiagnosticSeverity>)> {
3160 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3161 for chunk in buffer.snapshot().chunks(range, true) {
3162 if chunks
3163 .last()
3164 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3165 {
3166 chunks.last_mut().unwrap().0.push_str(chunk.text);
3167 } else {
3168 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3169 }
3170 }
3171 chunks
3172}
3173
// Verifies go-to-definition into a file outside the project: the target file
// is added as an invisible worktree that lives only as long as the
// definition result is held, and no extra language server is started for it.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server resolves the definition to a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs is present as a hidden (non-visible) worktree while the
        // definition result keeps it alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the hidden worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: list each worktree's absolute path and visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3272
// Verifies that when a completion item carries a `text_edit`, that edit's
// range and new text are used verbatim, taking precedence over the item's
// `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The completion item's text_edit replaces the trailing "fqn".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3355
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies how completion items are resolved when the server supplies a
    // default `edit_range` via `CompletionList.item_defaults` instead of a
    // per-item `text_edit`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Register a fake TypeScript server that advertises completion support.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range,
    // the completion's text should come from `insert_text` and its replace range
    // from the list-level default `edit_range`.
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Issue the completion request first; the handler installed below
        // answers it, and `.next().await` waits until it has been served.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the last 3 characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` wins over `label`; range comes from the defaults.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range,
    // the completion should fall back to the item's `label` as the new text.
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text or text_edit, the label is used verbatim.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3491
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion resolution when the server provides neither a
    // per-item `text_edit` nor a list-level default `edit_range`: the replace
    // range must be inferred from the word around the cursor.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults),
    // the inferred range should cover the partial word "fqn" before the cursor.
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // `.next().await` waits until the fake server has served the request.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults),
    // the label is used as the text, and the inferred range covers the partial
    // word "cmp" inside the string literal (cursor sits before the closing quote).
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3597
3598#[gpui::test]
3599async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3600 init_test(cx);
3601
3602 let fs = FakeFs::new(cx.executor());
3603 fs.insert_tree(
3604 path!("/dir"),
3605 json!({
3606 "a.ts": "",
3607 }),
3608 )
3609 .await;
3610
3611 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3612
3613 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3614 language_registry.add(typescript_lang());
3615 let mut fake_language_servers = language_registry.register_fake_lsp(
3616 "TypeScript",
3617 FakeLspAdapter {
3618 capabilities: lsp::ServerCapabilities {
3619 completion_provider: Some(lsp::CompletionOptions {
3620 trigger_characters: Some(vec![":".to_string()]),
3621 ..Default::default()
3622 }),
3623 ..Default::default()
3624 },
3625 ..Default::default()
3626 },
3627 );
3628
3629 let (buffer, _handle) = project
3630 .update(cx, |p, cx| {
3631 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3632 })
3633 .await
3634 .unwrap();
3635
3636 let fake_server = fake_language_servers.next().await.unwrap();
3637
3638 let text = "let a = b.fqn";
3639 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3640 let completions = project.update(cx, |project, cx| {
3641 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3642 });
3643
3644 fake_server
3645 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3646 Ok(Some(lsp::CompletionResponse::Array(vec![
3647 lsp::CompletionItem {
3648 label: "fullyQualifiedName?".into(),
3649 insert_text: Some("fully\rQualified\r\nName".into()),
3650 ..Default::default()
3651 },
3652 ])))
3653 })
3654 .next()
3655 .await;
3656 let completions = completions
3657 .await
3658 .unwrap()
3659 .into_iter()
3660 .flat_map(|response| response.completions)
3661 .collect::<Vec<_>>();
3662 assert_eq!(completions.len(), 1);
3663 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3664}
3665
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // Verifies applying a code action that resolves to a *command* (no edits):
    // the command must be executed on the server, and the edits the server
    // then pushes via `workspace/applyEdit` must be captured in the returned
    // project transaction.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    // Fake server advertising resolvable code actions and one command.
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying resolve `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: prepend "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction's edit is undoable like any local edit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3807
3808#[gpui::test(iterations = 10)]
3809async fn test_save_file(cx: &mut gpui::TestAppContext) {
3810 init_test(cx);
3811
3812 let fs = FakeFs::new(cx.executor());
3813 fs.insert_tree(
3814 path!("/dir"),
3815 json!({
3816 "file1": "the old contents",
3817 }),
3818 )
3819 .await;
3820
3821 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3822 let buffer = project
3823 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3824 .await
3825 .unwrap();
3826 buffer.update(cx, |buffer, cx| {
3827 assert_eq!(buffer.text(), "the old contents");
3828 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3829 });
3830
3831 project
3832 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3833 .await
3834 .unwrap();
3835
3836 let new_text = fs
3837 .load(Path::new(path!("/dir/file1")))
3838 .await
3839 .unwrap()
3840 .replace("\r\n", "\n");
3841 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3842}
3843
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Verifies that saving an untitled buffer to a path whose extension maps
    // to a language causes the matching language server to start and open the
    // file.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no path, so no language server applies yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving as "file.rs" should trigger Rust language detection.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: Arc::from("file.rs".as_ref()),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the newly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
3923
3924#[gpui::test(iterations = 100)]
3925async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
3926 init_test(cx);
3927
3928 let fs = FakeFs::new(cx.executor());
3929 fs.insert_tree(
3930 path!("/dir"),
3931 json!({
3932 "file1": "the original contents",
3933 }),
3934 )
3935 .await;
3936
3937 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3938 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3939 let buffer = project
3940 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3941 .await
3942 .unwrap();
3943
3944 // Change the buffer's file on disk, and then wait for the file change
3945 // to be detected by the worktree, so that the buffer starts reloading.
3946 fs.save(
3947 path!("/dir/file1").as_ref(),
3948 &"the first contents".into(),
3949 Default::default(),
3950 )
3951 .await
3952 .unwrap();
3953 worktree.next_event(cx).await;
3954
3955 // Change the buffer's file again. Depending on the random seed, the
3956 // previous file change may still be in progress.
3957 fs.save(
3958 path!("/dir/file1").as_ref(),
3959 &"the second contents".into(),
3960 Default::default(),
3961 )
3962 .await
3963 .unwrap();
3964 worktree.next_event(cx).await;
3965
3966 cx.executor().run_until_parked();
3967 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
3968 buffer.read_with(cx, |buffer, _| {
3969 assert_eq!(buffer.text(), on_disk_text);
3970 assert!(!buffer.is_dirty(), "buffer should not be dirty");
3971 assert!(!buffer.has_conflict(), "buffer should not be dirty");
3972 });
3973}
3974
3975#[gpui::test(iterations = 100)]
3976async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
3977 init_test(cx);
3978
3979 let fs = FakeFs::new(cx.executor());
3980 fs.insert_tree(
3981 path!("/dir"),
3982 json!({
3983 "file1": "the original contents",
3984 }),
3985 )
3986 .await;
3987
3988 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3989 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
3990 let buffer = project
3991 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3992 .await
3993 .unwrap();
3994
3995 // Change the buffer's file on disk, and then wait for the file change
3996 // to be detected by the worktree, so that the buffer starts reloading.
3997 fs.save(
3998 path!("/dir/file1").as_ref(),
3999 &"the first contents".into(),
4000 Default::default(),
4001 )
4002 .await
4003 .unwrap();
4004 worktree.next_event(cx).await;
4005
4006 cx.executor()
4007 .spawn(cx.executor().simulate_random_delay())
4008 .await;
4009
4010 // Perform a noop edit, causing the buffer's version to increase.
4011 buffer.update(cx, |buffer, cx| {
4012 buffer.edit([(0..0, " ")], None, cx);
4013 buffer.undo(cx);
4014 });
4015
4016 cx.executor().run_until_parked();
4017 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4018 buffer.read_with(cx, |buffer, _| {
4019 let buffer_text = buffer.text();
4020 if buffer_text == on_disk_text {
4021 assert!(
4022 !buffer.is_dirty() && !buffer.has_conflict(),
4023 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4024 );
4025 }
4026 // If the file change occurred while the buffer was processing the first
4027 // change, the buffer will be in a conflicting state.
4028 else {
4029 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4030 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4031 }
4032 });
4033}
4034
4035#[gpui::test]
4036async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4037 init_test(cx);
4038
4039 let fs = FakeFs::new(cx.executor());
4040 fs.insert_tree(
4041 path!("/dir"),
4042 json!({
4043 "file1": "the old contents",
4044 }),
4045 )
4046 .await;
4047
4048 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4049 let buffer = project
4050 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4051 .await
4052 .unwrap();
4053 buffer.update(cx, |buffer, cx| {
4054 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4055 });
4056
4057 project
4058 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4059 .await
4060 .unwrap();
4061
4062 let new_text = fs
4063 .load(Path::new(path!("/dir/file1")))
4064 .await
4065 .unwrap()
4066 .replace("\r\n", "\n");
4067 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4068}
4069
4070#[gpui::test]
4071async fn test_save_as(cx: &mut gpui::TestAppContext) {
4072 init_test(cx);
4073
4074 let fs = FakeFs::new(cx.executor());
4075 fs.insert_tree("/dir", json!({})).await;
4076
4077 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4078
4079 let languages = project.update(cx, |project, _| project.languages().clone());
4080 languages.add(rust_lang());
4081
4082 let buffer = project.update(cx, |project, cx| {
4083 project.create_local_buffer("", None, false, cx)
4084 });
4085 buffer.update(cx, |buffer, cx| {
4086 buffer.edit([(0..0, "abc")], None, cx);
4087 assert!(buffer.is_dirty());
4088 assert!(!buffer.has_conflict());
4089 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4090 });
4091 project
4092 .update(cx, |project, cx| {
4093 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4094 let path = ProjectPath {
4095 worktree_id,
4096 path: Arc::from(Path::new("file1.rs")),
4097 };
4098 project.save_buffer_as(buffer.clone(), path, cx)
4099 })
4100 .await
4101 .unwrap();
4102 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4103
4104 cx.executor().run_until_parked();
4105 buffer.update(cx, |buffer, cx| {
4106 assert_eq!(
4107 buffer.file().unwrap().full_path(cx),
4108 Path::new("dir/file1.rs")
4109 );
4110 assert!(!buffer.is_dirty());
4111 assert!(!buffer.has_conflict());
4112 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4113 });
4114
4115 let opened_buffer = project
4116 .update(cx, |project, cx| {
4117 project.open_local_buffer("/dir/file1.rs", cx)
4118 })
4119 .await
4120 .unwrap();
4121 assert_eq!(opened_buffer, buffer);
4122}
4123
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Renames and deletions on the real filesystem must be picked up by the
    // local worktree (preserving entry ids and re-pathing open buffers), and
    // the resulting update stream must bring a remote replica of the worktree
    // into a consistent state.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS watching blocks; allow parking for this test.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: look up the stable worktree entry id for a relative path.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(path)
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so we can replay them
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote =
        cx.update(|cx| Worktree::remote(0, 1, metadata, project.read(cx).client().into(), cx));

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app)
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including a rename of a parent directory).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers were re-pathed to the entries' new locations; the
        // deleted file keeps its old path but reports a Deleted disk state.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            Path::new("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            Path::new("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            Path::new("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote
                .paths()
                .map(|p| p.to_str().unwrap())
                .collect::<Vec<_>>(),
            vec![
                "a",
                path!("a/file1"),
                path!("a/file2.new"),
                "b",
                "d",
                path!("d/file3"),
                path!("d/file4"),
            ]
        );
    });
}
4289
4290#[gpui::test(iterations = 10)]
4291async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4292 init_test(cx);
4293
4294 let fs = FakeFs::new(cx.executor());
4295 fs.insert_tree(
4296 path!("/dir"),
4297 json!({
4298 "a": {
4299 "file1": "",
4300 }
4301 }),
4302 )
4303 .await;
4304
4305 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4306 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4307 let tree_id = tree.update(cx, |tree, _| tree.id());
4308
4309 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4310 project.update(cx, |project, cx| {
4311 let tree = project.worktrees(cx).next().unwrap();
4312 tree.read(cx)
4313 .entry_for_path(path)
4314 .unwrap_or_else(|| panic!("no entry for path {}", path))
4315 .id
4316 })
4317 };
4318
4319 let dir_id = id_for_path("a", cx);
4320 let file_id = id_for_path("a/file1", cx);
4321 let buffer = project
4322 .update(cx, |p, cx| p.open_buffer((tree_id, "a/file1"), cx))
4323 .await
4324 .unwrap();
4325 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4326
4327 project
4328 .update(cx, |project, cx| {
4329 project.rename_entry(dir_id, Path::new("b"), cx)
4330 })
4331 .unwrap()
4332 .await
4333 .into_included()
4334 .unwrap();
4335 cx.executor().run_until_parked();
4336
4337 assert_eq!(id_for_path("b", cx), dir_id);
4338 assert_eq!(id_for_path("b/file1", cx), file_id);
4339 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4340}
4341
4342#[gpui::test]
4343async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4344 init_test(cx);
4345
4346 let fs = FakeFs::new(cx.executor());
4347 fs.insert_tree(
4348 "/dir",
4349 json!({
4350 "a.txt": "a-contents",
4351 "b.txt": "b-contents",
4352 }),
4353 )
4354 .await;
4355
4356 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4357
4358 // Spawn multiple tasks to open paths, repeating some paths.
4359 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4360 (
4361 p.open_local_buffer("/dir/a.txt", cx),
4362 p.open_local_buffer("/dir/b.txt", cx),
4363 p.open_local_buffer("/dir/a.txt", cx),
4364 )
4365 });
4366
4367 let buffer_a_1 = buffer_a_1.await.unwrap();
4368 let buffer_a_2 = buffer_a_2.await.unwrap();
4369 let buffer_b = buffer_b.await.unwrap();
4370 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4371 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4372
4373 // There is only one buffer per path.
4374 let buffer_a_id = buffer_a_1.entity_id();
4375 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4376
4377 // Open the same path again while it is still open.
4378 drop(buffer_a_1);
4379 let buffer_a_3 = project
4380 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4381 .await
4382 .unwrap();
4383
4384 // There's still only one buffer per path.
4385 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4386}
4387
4388#[gpui::test]
4389async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4390 init_test(cx);
4391
4392 let fs = FakeFs::new(cx.executor());
4393 fs.insert_tree(
4394 path!("/dir"),
4395 json!({
4396 "file1": "abc",
4397 "file2": "def",
4398 "file3": "ghi",
4399 }),
4400 )
4401 .await;
4402
4403 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4404
4405 let buffer1 = project
4406 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4407 .await
4408 .unwrap();
4409 let events = Arc::new(Mutex::new(Vec::new()));
4410
4411 // initially, the buffer isn't dirty.
4412 buffer1.update(cx, |buffer, cx| {
4413 cx.subscribe(&buffer1, {
4414 let events = events.clone();
4415 move |_, _, event, _| match event {
4416 BufferEvent::Operation { .. } => {}
4417 _ => events.lock().push(event.clone()),
4418 }
4419 })
4420 .detach();
4421
4422 assert!(!buffer.is_dirty());
4423 assert!(events.lock().is_empty());
4424
4425 buffer.edit([(1..2, "")], None, cx);
4426 });
4427
4428 // after the first edit, the buffer is dirty, and emits a dirtied event.
4429 buffer1.update(cx, |buffer, cx| {
4430 assert!(buffer.text() == "ac");
4431 assert!(buffer.is_dirty());
4432 assert_eq!(
4433 *events.lock(),
4434 &[
4435 language::BufferEvent::Edited,
4436 language::BufferEvent::DirtyChanged
4437 ]
4438 );
4439 events.lock().clear();
4440 buffer.did_save(
4441 buffer.version(),
4442 buffer.file().unwrap().disk_state().mtime(),
4443 cx,
4444 );
4445 });
4446
4447 // after saving, the buffer is not dirty, and emits a saved event.
4448 buffer1.update(cx, |buffer, cx| {
4449 assert!(!buffer.is_dirty());
4450 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4451 events.lock().clear();
4452
4453 buffer.edit([(1..1, "B")], None, cx);
4454 buffer.edit([(2..2, "D")], None, cx);
4455 });
4456
4457 // after editing again, the buffer is dirty, and emits another dirty event.
4458 buffer1.update(cx, |buffer, cx| {
4459 assert!(buffer.text() == "aBDc");
4460 assert!(buffer.is_dirty());
4461 assert_eq!(
4462 *events.lock(),
4463 &[
4464 language::BufferEvent::Edited,
4465 language::BufferEvent::DirtyChanged,
4466 language::BufferEvent::Edited,
4467 ],
4468 );
4469 events.lock().clear();
4470
4471 // After restoring the buffer to its previously-saved state,
4472 // the buffer is not considered dirty anymore.
4473 buffer.edit([(1..3, "")], None, cx);
4474 assert!(buffer.text() == "ac");
4475 assert!(!buffer.is_dirty());
4476 });
4477
4478 assert_eq!(
4479 *events.lock(),
4480 &[
4481 language::BufferEvent::Edited,
4482 language::BufferEvent::DirtyChanged
4483 ]
4484 );
4485
4486 // When a file is deleted, it is not considered dirty.
4487 let events = Arc::new(Mutex::new(Vec::new()));
4488 let buffer2 = project
4489 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4490 .await
4491 .unwrap();
4492 buffer2.update(cx, |_, cx| {
4493 cx.subscribe(&buffer2, {
4494 let events = events.clone();
4495 move |_, _, event, _| match event {
4496 BufferEvent::Operation { .. } => {}
4497 _ => events.lock().push(event.clone()),
4498 }
4499 })
4500 .detach();
4501 });
4502
4503 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4504 .await
4505 .unwrap();
4506 cx.executor().run_until_parked();
4507 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4508 assert_eq!(
4509 mem::take(&mut *events.lock()),
4510 &[language::BufferEvent::FileHandleChanged]
4511 );
4512
4513 // Buffer becomes dirty when edited.
4514 buffer2.update(cx, |buffer, cx| {
4515 buffer.edit([(2..3, "")], None, cx);
4516 assert_eq!(buffer.is_dirty(), true);
4517 });
4518 assert_eq!(
4519 mem::take(&mut *events.lock()),
4520 &[
4521 language::BufferEvent::Edited,
4522 language::BufferEvent::DirtyChanged
4523 ]
4524 );
4525
4526 // Buffer becomes clean again when all of its content is removed, because
4527 // the file was deleted.
4528 buffer2.update(cx, |buffer, cx| {
4529 buffer.edit([(0..2, "")], None, cx);
4530 assert_eq!(buffer.is_empty(), true);
4531 assert_eq!(buffer.is_dirty(), false);
4532 });
4533 assert_eq!(
4534 *events.lock(),
4535 &[
4536 language::BufferEvent::Edited,
4537 language::BufferEvent::DirtyChanged
4538 ]
4539 );
4540
4541 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4542 let events = Arc::new(Mutex::new(Vec::new()));
4543 let buffer3 = project
4544 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4545 .await
4546 .unwrap();
4547 buffer3.update(cx, |_, cx| {
4548 cx.subscribe(&buffer3, {
4549 let events = events.clone();
4550 move |_, _, event, _| match event {
4551 BufferEvent::Operation { .. } => {}
4552 _ => events.lock().push(event.clone()),
4553 }
4554 })
4555 .detach();
4556 });
4557
4558 buffer3.update(cx, |buffer, cx| {
4559 buffer.edit([(0..0, "x")], None, cx);
4560 });
4561 events.lock().clear();
4562 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4563 .await
4564 .unwrap();
4565 cx.executor().run_until_parked();
4566 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4567 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4568}
4569
// Verifies how an open buffer reacts to its backing file changing on disk:
// a clean buffer is reloaded (with anchors preserved via a diff-based edit),
// while a dirty buffer keeps its contents and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The ˇ markers record offsets whose anchors we expect to survive the
    // on-disk change.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at the marked offsets before the file changes.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors track the corresponding marked positions in the new
        // contents, proving the reload was applied as a minimal diff.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4652
4653#[gpui::test]
4654async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4655 init_test(cx);
4656
4657 let fs = FakeFs::new(cx.executor());
4658 fs.insert_tree(
4659 path!("/dir"),
4660 json!({
4661 "file1": "a\nb\nc\n",
4662 "file2": "one\r\ntwo\r\nthree\r\n",
4663 }),
4664 )
4665 .await;
4666
4667 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4668 let buffer1 = project
4669 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4670 .await
4671 .unwrap();
4672 let buffer2 = project
4673 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4674 .await
4675 .unwrap();
4676
4677 buffer1.update(cx, |buffer, _| {
4678 assert_eq!(buffer.text(), "a\nb\nc\n");
4679 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4680 });
4681 buffer2.update(cx, |buffer, _| {
4682 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4683 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4684 });
4685
4686 // Change a file's line endings on disk from unix to windows. The buffer's
4687 // state updates correctly.
4688 fs.save(
4689 path!("/dir/file1").as_ref(),
4690 &"aaa\nb\nc\n".into(),
4691 LineEnding::Windows,
4692 )
4693 .await
4694 .unwrap();
4695 cx.executor().run_until_parked();
4696 buffer1.update(cx, |buffer, _| {
4697 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4698 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4699 });
4700
4701 // Save a file with windows line endings. The file is written correctly.
4702 buffer2.update(cx, |buffer, cx| {
4703 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4704 });
4705 project
4706 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4707 .await
4708 .unwrap();
4709 assert_eq!(
4710 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4711 "one\r\ntwo\r\nthree\r\nfour\r\n",
4712 );
4713}
4714
// Verifies that pushed LSP diagnostics linked via `related_information` are
// grouped: each group has one primary entry plus its hints, and groups can be
// queried both as a flat position-ordered list and per group id.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Publish two diagnostic "families":
    // - "error 1" (warning) with one hint, cross-linked via related_information.
    // - "error 2" (error) with two hints, likewise cross-linked.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the diagnostics into the store as if pushed by a language server.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The flat view yields every entry ordered by buffer position. Note that
    // "error 2" ends up as group 0 and "error 1" as group 1 (group ids do not
    // follow publish order here).
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary plus its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
4974
// Verifies the LSP file-operation protocol around renames: when an entry is
// renamed, a server that registered for file operations receives a
// `workspace/willRenameFiles` request (whose returned WorkspaceEdit is
// applied) followed by a `workspace/didRenameFiles` notification.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in *.rs files and in all folders, for both the
    // willRename and didRename sides.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it won't complete until the server answers
    // willRenameFiles below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree.read(cx).entry_for_path("one.rs").unwrap();
        project.rename_entry(entry.id, "three.rs".as_ref(), cx)
    });
    // The edit the server will return from willRenameFiles; the project is
    // expected to resolve (apply) it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    // Answer willRenameFiles, checking the old/new URIs and recording that
    // the handler actually ran.
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5103
// Verifies symbol rename through LSP: `prepare_rename` surfaces the server's
// renameable range, and `perform_rename` applies a multi-file WorkspaceEdit,
// producing a transaction covering every edited buffer.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server advertises rename with prepare support.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server answers with edits spanning both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction includes both buffers — the one that was
    // already open, and two.rs which the rename had to open.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5243
// Verifies project-wide text search, and that unsaved buffer edits are
// searched in place of the on-disk contents.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // On-disk search: "TWO" occurs only in two.rs and three.rs.
    // NOTE(review): the boolean/`Default` positional args configure the query
    // (case sensitivity, inclusions/exclusions, etc.) — see SearchQuery::text
    // for their exact meanings.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now contains "TWO"
    // twice: "const FOUR: usize = two::TWO + two::TWO;".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same search now also reports matches in the dirty, unsaved buffer.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5320
// Verifies the inclusion-path filter (fifth argument to SearchQuery::text):
// only files matching at least one inclusion glob are searched; globs that
// match nothing are harmless.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob that matches no files at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Inclusion restricted to Rust sources.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // Mixed list where one glob matches and one doesn't.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs combine additively.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5440
5441#[gpui::test]
5442async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5443 init_test(cx);
5444
5445 let search_query = "file";
5446
5447 let fs = FakeFs::new(cx.executor());
5448 fs.insert_tree(
5449 path!("/dir"),
5450 json!({
5451 "one.rs": r#"// Rust file one"#,
5452 "one.ts": r#"// TypeScript file one"#,
5453 "two.rs": r#"// Rust file two"#,
5454 "two.ts": r#"// TypeScript file two"#,
5455 }),
5456 )
5457 .await;
5458 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5459
5460 assert_eq!(
5461 search(
5462 &project,
5463 SearchQuery::text(
5464 search_query,
5465 false,
5466 true,
5467 false,
5468 Default::default(),
5469 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5470 false,
5471 None,
5472 )
5473 .unwrap(),
5474 cx
5475 )
5476 .await
5477 .unwrap(),
5478 HashMap::from_iter([
5479 (path!("dir/one.rs").to_string(), vec![8..12]),
5480 (path!("dir/one.ts").to_string(), vec![14..18]),
5481 (path!("dir/two.rs").to_string(), vec![8..12]),
5482 (path!("dir/two.ts").to_string(), vec![14..18]),
5483 ]),
5484 "If no exclusions match, all files should be returned"
5485 );
5486
5487 assert_eq!(
5488 search(
5489 &project,
5490 SearchQuery::text(
5491 search_query,
5492 false,
5493 true,
5494 false,
5495 Default::default(),
5496 PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
5497 false,
5498 None,
5499 )
5500 .unwrap(),
5501 cx
5502 )
5503 .await
5504 .unwrap(),
5505 HashMap::from_iter([
5506 (path!("dir/one.ts").to_string(), vec![14..18]),
5507 (path!("dir/two.ts").to_string(), vec![14..18]),
5508 ]),
5509 "Rust exclusion search should give only TypeScript files"
5510 );
5511
5512 assert_eq!(
5513 search(
5514 &project,
5515 SearchQuery::text(
5516 search_query,
5517 false,
5518 true,
5519 false,
5520 Default::default(),
5521 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5522 false,
5523 None,
5524 )
5525 .unwrap(),
5526 cx
5527 )
5528 .await
5529 .unwrap(),
5530 HashMap::from_iter([
5531 (path!("dir/one.rs").to_string(), vec![8..12]),
5532 (path!("dir/two.rs").to_string(), vec![8..12]),
5533 ]),
5534 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5535 );
5536
5537 assert!(
5538 search(
5539 &project,
5540 SearchQuery::text(
5541 search_query,
5542 false,
5543 true,
5544 false,
5545 Default::default(),
5546 PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
5547 .unwrap(),
5548 false,
5549 None,
5550 )
5551 .unwrap(),
5552 cx
5553 )
5554 .await
5555 .unwrap()
5556 .is_empty(),
5557 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5558 );
5559}
5560
// Same exclusion scenarios as `test_search_with_exclusions`, but with an
// untitled in-memory buffer (whose content matches the query) also present.
// NOTE(review): that buffer never appears in the expected results —
// presumably because path-based exclusion handling also covers searchable
// buffers without files; confirm against SearchQuery/search implementation.
#[gpui::test]
async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // An untitled buffer whose text is exactly the search query.
    let _buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("file", None, false, cx)
    });

    // Exclusion glob that matches nothing: all worktree files are returned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding Rust sources leaves only TypeScript matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A non-matching glob alongside a matching one changes nothing.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding every extension yields no results.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()])
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5684
5685#[gpui::test]
5686async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5687 init_test(cx);
5688
5689 let search_query = "file";
5690
5691 let fs = FakeFs::new(cx.executor());
5692 fs.insert_tree(
5693 path!("/dir"),
5694 json!({
5695 "one.rs": r#"// Rust file one"#,
5696 "one.ts": r#"// TypeScript file one"#,
5697 "two.rs": r#"// Rust file two"#,
5698 "two.ts": r#"// TypeScript file two"#,
5699 }),
5700 )
5701 .await;
5702 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5703
5704 assert!(
5705 search(
5706 &project,
5707 SearchQuery::text(
5708 search_query,
5709 false,
5710 true,
5711 false,
5712 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5713 PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
5714 false,
5715 None,
5716 )
5717 .unwrap(),
5718 cx
5719 )
5720 .await
5721 .unwrap()
5722 .is_empty(),
5723 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5724 );
5725
5726 assert!(
5727 search(
5728 &project,
5729 SearchQuery::text(
5730 search_query,
5731 false,
5732 true,
5733 false,
5734 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5735 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5736 false,
5737 None,
5738 )
5739 .unwrap(),
5740 cx
5741 )
5742 .await
5743 .unwrap()
5744 .is_empty(),
5745 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5746 );
5747
5748 assert!(
5749 search(
5750 &project,
5751 SearchQuery::text(
5752 search_query,
5753 false,
5754 true,
5755 false,
5756 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5757 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5758 false,
5759 None,
5760 )
5761 .unwrap(),
5762 cx
5763 )
5764 .await
5765 .unwrap()
5766 .is_empty(),
5767 "Non-matching inclusions and exclusions should not change that."
5768 );
5769
5770 assert_eq!(
5771 search(
5772 &project,
5773 SearchQuery::text(
5774 search_query,
5775 false,
5776 true,
5777 false,
5778 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
5779 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
5780 false,
5781 None,
5782 )
5783 .unwrap(),
5784 cx
5785 )
5786 .await
5787 .unwrap(),
5788 HashMap::from_iter([
5789 (path!("dir/one.ts").to_string(), vec![14..18]),
5790 (path!("dir/two.ts").to_string(), vec![14..18]),
5791 ]),
5792 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5793 );
5794}
5795
5796#[gpui::test]
5797async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5798 init_test(cx);
5799
5800 let fs = FakeFs::new(cx.executor());
5801 fs.insert_tree(
5802 path!("/worktree-a"),
5803 json!({
5804 "haystack.rs": r#"// NEEDLE"#,
5805 "haystack.ts": r#"// NEEDLE"#,
5806 }),
5807 )
5808 .await;
5809 fs.insert_tree(
5810 path!("/worktree-b"),
5811 json!({
5812 "haystack.rs": r#"// NEEDLE"#,
5813 "haystack.ts": r#"// NEEDLE"#,
5814 }),
5815 )
5816 .await;
5817
5818 let project = Project::test(
5819 fs.clone(),
5820 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5821 cx,
5822 )
5823 .await;
5824
5825 assert_eq!(
5826 search(
5827 &project,
5828 SearchQuery::text(
5829 "NEEDLE",
5830 false,
5831 true,
5832 false,
5833 PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
5834 Default::default(),
5835 true,
5836 None,
5837 )
5838 .unwrap(),
5839 cx
5840 )
5841 .await
5842 .unwrap(),
5843 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5844 "should only return results from included worktree"
5845 );
5846 assert_eq!(
5847 search(
5848 &project,
5849 SearchQuery::text(
5850 "NEEDLE",
5851 false,
5852 true,
5853 false,
5854 PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
5855 Default::default(),
5856 true,
5857 None,
5858 )
5859 .unwrap(),
5860 cx
5861 )
5862 .await
5863 .unwrap(),
5864 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
5865 "should only return results from included worktree"
5866 );
5867
5868 assert_eq!(
5869 search(
5870 &project,
5871 SearchQuery::text(
5872 "NEEDLE",
5873 false,
5874 true,
5875 false,
5876 PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
5877 Default::default(),
5878 false,
5879 None,
5880 )
5881 .unwrap(),
5882 cx
5883 )
5884 .await
5885 .unwrap(),
5886 HashMap::from_iter([
5887 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
5888 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
5889 ]),
5890 "should return results from both worktrees"
5891 );
5892}
5893
5894#[gpui::test]
5895async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
5896 init_test(cx);
5897
5898 let fs = FakeFs::new(cx.background_executor.clone());
5899 fs.insert_tree(
5900 path!("/dir"),
5901 json!({
5902 ".git": {},
5903 ".gitignore": "**/target\n/node_modules\n",
5904 "target": {
5905 "index.txt": "index_key:index_value"
5906 },
5907 "node_modules": {
5908 "eslint": {
5909 "index.ts": "const eslint_key = 'eslint value'",
5910 "package.json": r#"{ "some_key": "some value" }"#,
5911 },
5912 "prettier": {
5913 "index.ts": "const prettier_key = 'prettier value'",
5914 "package.json": r#"{ "other_key": "other value" }"#,
5915 },
5916 },
5917 "package.json": r#"{ "main_key": "main value" }"#,
5918 }),
5919 )
5920 .await;
5921 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5922
5923 let query = "key";
5924 assert_eq!(
5925 search(
5926 &project,
5927 SearchQuery::text(
5928 query,
5929 false,
5930 false,
5931 false,
5932 Default::default(),
5933 Default::default(),
5934 false,
5935 None,
5936 )
5937 .unwrap(),
5938 cx
5939 )
5940 .await
5941 .unwrap(),
5942 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
5943 "Only one non-ignored file should have the query"
5944 );
5945
5946 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5947 assert_eq!(
5948 search(
5949 &project,
5950 SearchQuery::text(
5951 query,
5952 false,
5953 false,
5954 true,
5955 Default::default(),
5956 Default::default(),
5957 false,
5958 None,
5959 )
5960 .unwrap(),
5961 cx
5962 )
5963 .await
5964 .unwrap(),
5965 HashMap::from_iter([
5966 (path!("dir/package.json").to_string(), vec![8..11]),
5967 (path!("dir/target/index.txt").to_string(), vec![6..9]),
5968 (
5969 path!("dir/node_modules/prettier/package.json").to_string(),
5970 vec![9..12]
5971 ),
5972 (
5973 path!("dir/node_modules/prettier/index.ts").to_string(),
5974 vec![15..18]
5975 ),
5976 (
5977 path!("dir/node_modules/eslint/index.ts").to_string(),
5978 vec![13..16]
5979 ),
5980 (
5981 path!("dir/node_modules/eslint/package.json").to_string(),
5982 vec![8..11]
5983 ),
5984 ]),
5985 "Unrestricted search with ignored directories should find every file with the query"
5986 );
5987
5988 let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
5989 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
5990 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5991 assert_eq!(
5992 search(
5993 &project,
5994 SearchQuery::text(
5995 query,
5996 false,
5997 false,
5998 true,
5999 files_to_include,
6000 files_to_exclude,
6001 false,
6002 None,
6003 )
6004 .unwrap(),
6005 cx
6006 )
6007 .await
6008 .unwrap(),
6009 HashMap::from_iter([(
6010 path!("dir/node_modules/prettier/package.json").to_string(),
6011 vec![9..12]
6012 )]),
6013 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6014 );
6015}
6016
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            // Each Cyrillic character is 2 bytes in UTF-8, so the match
            // ranges asserted below are byte offsets, not char offsets.
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // A case-sensitive Unicode search stays a plain text query (asserted
    // below) and matches only the lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // "привет" is 12 bytes; the "// " prefix is 3 bytes, hence 3..15.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // A case-insensitive Unicode search is promoted to a regex query
    // (asserted below) and matches both cases of the word.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' is matched literally (only "ПРИВЕТ." in two.rs hits),
    // not as a regex wildcard — "ПРИВЕТ?" in one.rs does not match.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6100
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // Only `/one/two/three` is in the project; its parents exist on disk but
    // lie outside the worktree.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    // A file name containing dots (but no `..` path component) is legal.
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "b.."), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // Can't create paths outside the project
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "../../boop"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Can't create paths with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.create_entry((id, "four/../beep"), true, cx)
        })
        .await;
    assert!(result.is_err());

    // Only the `b..` entry was created; the rejected paths left no trace.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );

    // And we cannot open buffers with '..'
    let result = project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            project.open_buffer((id, "../c.rs"), cx)
        })
        .await;
    assert!(result.is_err())
}
6170
6171#[gpui::test]
6172async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6173 init_test(cx);
6174
6175 let fs = FakeFs::new(cx.executor());
6176 fs.insert_tree(
6177 path!("/dir"),
6178 json!({
6179 "a.tsx": "a",
6180 }),
6181 )
6182 .await;
6183
6184 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6185
6186 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6187 language_registry.add(tsx_lang());
6188 let language_server_names = [
6189 "TypeScriptServer",
6190 "TailwindServer",
6191 "ESLintServer",
6192 "NoHoverCapabilitiesServer",
6193 ];
6194 let mut language_servers = [
6195 language_registry.register_fake_lsp(
6196 "tsx",
6197 FakeLspAdapter {
6198 name: language_server_names[0],
6199 capabilities: lsp::ServerCapabilities {
6200 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6201 ..lsp::ServerCapabilities::default()
6202 },
6203 ..FakeLspAdapter::default()
6204 },
6205 ),
6206 language_registry.register_fake_lsp(
6207 "tsx",
6208 FakeLspAdapter {
6209 name: language_server_names[1],
6210 capabilities: lsp::ServerCapabilities {
6211 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6212 ..lsp::ServerCapabilities::default()
6213 },
6214 ..FakeLspAdapter::default()
6215 },
6216 ),
6217 language_registry.register_fake_lsp(
6218 "tsx",
6219 FakeLspAdapter {
6220 name: language_server_names[2],
6221 capabilities: lsp::ServerCapabilities {
6222 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6223 ..lsp::ServerCapabilities::default()
6224 },
6225 ..FakeLspAdapter::default()
6226 },
6227 ),
6228 language_registry.register_fake_lsp(
6229 "tsx",
6230 FakeLspAdapter {
6231 name: language_server_names[3],
6232 capabilities: lsp::ServerCapabilities {
6233 hover_provider: None,
6234 ..lsp::ServerCapabilities::default()
6235 },
6236 ..FakeLspAdapter::default()
6237 },
6238 ),
6239 ];
6240
6241 let (buffer, _handle) = project
6242 .update(cx, |p, cx| {
6243 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6244 })
6245 .await
6246 .unwrap();
6247 cx.executor().run_until_parked();
6248
6249 let mut servers_with_hover_requests = HashMap::default();
6250 for i in 0..language_server_names.len() {
6251 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6252 panic!(
6253 "Failed to get language server #{i} with name {}",
6254 &language_server_names[i]
6255 )
6256 });
6257 let new_server_name = new_server.server.name();
6258 assert!(
6259 !servers_with_hover_requests.contains_key(&new_server_name),
6260 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6261 );
6262 match new_server_name.as_ref() {
6263 "TailwindServer" | "TypeScriptServer" => {
6264 servers_with_hover_requests.insert(
6265 new_server_name.clone(),
6266 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6267 move |_, _| {
6268 let name = new_server_name.clone();
6269 async move {
6270 Ok(Some(lsp::Hover {
6271 contents: lsp::HoverContents::Scalar(
6272 lsp::MarkedString::String(format!("{name} hover")),
6273 ),
6274 range: None,
6275 }))
6276 }
6277 },
6278 ),
6279 );
6280 }
6281 "ESLintServer" => {
6282 servers_with_hover_requests.insert(
6283 new_server_name,
6284 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6285 |_, _| async move { Ok(None) },
6286 ),
6287 );
6288 }
6289 "NoHoverCapabilitiesServer" => {
6290 let _never_handled = new_server
6291 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6292 panic!(
6293 "Should not call for hovers server with no corresponding capabilities"
6294 )
6295 });
6296 }
6297 unexpected => panic!("Unexpected server name: {unexpected}"),
6298 }
6299 }
6300
6301 let hover_task = project.update(cx, |project, cx| {
6302 project.hover(&buffer, Point::new(0, 0), cx)
6303 });
6304 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6305 |mut hover_request| async move {
6306 hover_request
6307 .next()
6308 .await
6309 .expect("All hover requests should have been triggered")
6310 },
6311 ))
6312 .await;
6313 assert_eq!(
6314 vec!["TailwindServer hover", "TypeScriptServer hover"],
6315 hover_task
6316 .await
6317 .into_iter()
6318 .flatten()
6319 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6320 .sorted()
6321 .collect::<Vec<_>>(),
6322 "Should receive hover responses from all related servers with hover capabilities"
6323 );
6324}
6325
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers with hover parts that are empty or whitespace-only
    // ("", " ", "\n\n\n"); none of them should survive into the final hover.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Request a hover, wait until the fake server handled it, then verify
    // the blank parts were dropped rather than rendered.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6399
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds: organize-imports
    // and fix-all.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Ask only for organize-imports actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // The fix-all action must have been filtered out by the requested kinds.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6478
6479#[gpui::test]
6480async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6481 init_test(cx);
6482
6483 let fs = FakeFs::new(cx.executor());
6484 fs.insert_tree(
6485 path!("/dir"),
6486 json!({
6487 "a.tsx": "a",
6488 }),
6489 )
6490 .await;
6491
6492 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6493
6494 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6495 language_registry.add(tsx_lang());
6496 let language_server_names = [
6497 "TypeScriptServer",
6498 "TailwindServer",
6499 "ESLintServer",
6500 "NoActionsCapabilitiesServer",
6501 ];
6502
6503 let mut language_server_rxs = [
6504 language_registry.register_fake_lsp(
6505 "tsx",
6506 FakeLspAdapter {
6507 name: language_server_names[0],
6508 capabilities: lsp::ServerCapabilities {
6509 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6510 ..lsp::ServerCapabilities::default()
6511 },
6512 ..FakeLspAdapter::default()
6513 },
6514 ),
6515 language_registry.register_fake_lsp(
6516 "tsx",
6517 FakeLspAdapter {
6518 name: language_server_names[1],
6519 capabilities: lsp::ServerCapabilities {
6520 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6521 ..lsp::ServerCapabilities::default()
6522 },
6523 ..FakeLspAdapter::default()
6524 },
6525 ),
6526 language_registry.register_fake_lsp(
6527 "tsx",
6528 FakeLspAdapter {
6529 name: language_server_names[2],
6530 capabilities: lsp::ServerCapabilities {
6531 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6532 ..lsp::ServerCapabilities::default()
6533 },
6534 ..FakeLspAdapter::default()
6535 },
6536 ),
6537 language_registry.register_fake_lsp(
6538 "tsx",
6539 FakeLspAdapter {
6540 name: language_server_names[3],
6541 capabilities: lsp::ServerCapabilities {
6542 code_action_provider: None,
6543 ..lsp::ServerCapabilities::default()
6544 },
6545 ..FakeLspAdapter::default()
6546 },
6547 ),
6548 ];
6549
6550 let (buffer, _handle) = project
6551 .update(cx, |p, cx| {
6552 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6553 })
6554 .await
6555 .unwrap();
6556 cx.executor().run_until_parked();
6557
6558 let mut servers_with_actions_requests = HashMap::default();
6559 for i in 0..language_server_names.len() {
6560 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6561 panic!(
6562 "Failed to get language server #{i} with name {}",
6563 &language_server_names[i]
6564 )
6565 });
6566 let new_server_name = new_server.server.name();
6567
6568 assert!(
6569 !servers_with_actions_requests.contains_key(&new_server_name),
6570 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6571 );
6572 match new_server_name.0.as_ref() {
6573 "TailwindServer" | "TypeScriptServer" => {
6574 servers_with_actions_requests.insert(
6575 new_server_name.clone(),
6576 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6577 move |_, _| {
6578 let name = new_server_name.clone();
6579 async move {
6580 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6581 lsp::CodeAction {
6582 title: format!("{name} code action"),
6583 ..lsp::CodeAction::default()
6584 },
6585 )]))
6586 }
6587 },
6588 ),
6589 );
6590 }
6591 "ESLintServer" => {
6592 servers_with_actions_requests.insert(
6593 new_server_name,
6594 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6595 |_, _| async move { Ok(None) },
6596 ),
6597 );
6598 }
6599 "NoActionsCapabilitiesServer" => {
6600 let _never_handled = new_server
6601 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6602 panic!(
6603 "Should not call for code actions server with no corresponding capabilities"
6604 )
6605 });
6606 }
6607 unexpected => panic!("Unexpected server name: {unexpected}"),
6608 }
6609 }
6610
6611 let code_actions_task = project.update(cx, |project, cx| {
6612 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6613 });
6614
6615 // cx.run_until_parked();
6616 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6617 |mut code_actions_request| async move {
6618 code_actions_request
6619 .next()
6620 .await
6621 .expect("All code actions requests should have been triggered")
6622 },
6623 ))
6624 .await;
6625 assert_eq!(
6626 vec!["TailwindServer code action", "TypeScriptServer code action"],
6627 code_actions_task
6628 .await
6629 .unwrap()
6630 .unwrap()
6631 .into_iter()
6632 .map(|code_action| code_action.lsp_action.title().to_owned())
6633 .sorted()
6634 .collect::<Vec<_>>(),
6635 "Should receive code actions responses from all related servers with hover capabilities"
6636 );
6637}
6638
// Exercises `Project::move_worktree` through every adjacent and non-adjacent
// move, verifying the visible worktree order after each step.
#[gpui::test]
async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/dir",
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;",
            "c.rs": "let c = 2;",
        }),
    )
    .await;

    // Each file becomes its own single-file worktree.
    let project = Project::test(
        fs,
        [
            "/dir/a.rs".as_ref(),
            "/dir/b.rs".as_ref(),
            "/dir/c.rs".as_ref(),
        ],
        cx,
    )
    .await;

    // check the initial state and get the worktrees
    let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let worktree_a = worktrees[0].read(cx);
        let worktree_b = worktrees[1].read(cx);
        let worktree_c = worktrees[2].read(cx);

        // check they start in the right order
        assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");

        (
            worktrees[0].clone(),
            worktrees[1].clone(),
            worktrees[2].clone(),
        )
    });

    // move first worktree to after the second
    // [a, b, c] -> [b, a, c]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving first after second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to before the first
    // [b, a, c] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            // NOTE: after the previous move the order is [b, a, c], so
            // worktree_a currently occupies the second slot and worktree_b
            // the first — hence the swapped-looking bindings.
            let second = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(first.id(), second.id(), cx)
        })
        .expect("moving second before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the second worktree to after the third
    // [a, b, c] -> [a, c, b]
    project
        .update(cx, |project, cx| {
            let second = worktree_b.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(second.id(), third.id(), cx)
        })
        .expect("moving second after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
    });

    // move the third worktree to before the second
    // [a, c, b] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            // In [a, c, b]: worktree_b is third, worktree_c is second.
            let third = worktree_c.read(cx);
            let second = worktree_b.read(cx);
            project.move_worktree(third.id(), second.id(), cx)
        })
        .expect("moving third before second");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });

    // move the first worktree to after the third
    // [a, b, c] -> [b, c, a]
    project
        .update(cx, |project, cx| {
            let first = worktree_a.read(cx);
            let third = worktree_c.read(cx);
            project.move_worktree(first.id(), third.id(), cx)
        })
        .expect("moving first after third");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
    });

    // move the third worktree to before the first
    // [b, c, a] -> [a, b, c]
    project
        .update(cx, |project, cx| {
            // In [b, c, a]: worktree_a is third, worktree_b is first.
            let third = worktree_a.read(cx);
            let first = worktree_b.read(cx);
            project.move_worktree(third.id(), first.id(), cx)
        })
        .expect("moving third before first");

    // check the state after moving
    project.update(cx, |project, cx| {
        let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
        assert_eq!(worktrees.len(), 3);

        let first = worktrees[0].read(cx);
        let second = worktrees[1].read(cx);
        let third = worktrees[2].read(cx);

        // check they are now in the right order
        assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
        assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
        assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
    });
}
6836
6837#[gpui::test]
6838async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6839 init_test(cx);
6840
6841 let staged_contents = r#"
6842 fn main() {
6843 println!("hello world");
6844 }
6845 "#
6846 .unindent();
6847 let file_contents = r#"
6848 // print goodbye
6849 fn main() {
6850 println!("goodbye world");
6851 }
6852 "#
6853 .unindent();
6854
6855 let fs = FakeFs::new(cx.background_executor.clone());
6856 fs.insert_tree(
6857 "/dir",
6858 json!({
6859 ".git": {},
6860 "src": {
6861 "main.rs": file_contents,
6862 }
6863 }),
6864 )
6865 .await;
6866
6867 fs.set_index_for_repo(
6868 Path::new("/dir/.git"),
6869 &[("src/main.rs".into(), staged_contents)],
6870 );
6871
6872 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6873
6874 let buffer = project
6875 .update(cx, |project, cx| {
6876 project.open_local_buffer("/dir/src/main.rs", cx)
6877 })
6878 .await
6879 .unwrap();
6880 let unstaged_diff = project
6881 .update(cx, |project, cx| {
6882 project.open_unstaged_diff(buffer.clone(), cx)
6883 })
6884 .await
6885 .unwrap();
6886
6887 cx.run_until_parked();
6888 unstaged_diff.update(cx, |unstaged_diff, cx| {
6889 let snapshot = buffer.read(cx).snapshot();
6890 assert_hunks(
6891 unstaged_diff.hunks(&snapshot, cx),
6892 &snapshot,
6893 &unstaged_diff.base_text_string().unwrap(),
6894 &[
6895 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
6896 (
6897 2..3,
6898 " println!(\"hello world\");\n",
6899 " println!(\"goodbye world\");\n",
6900 DiffHunkStatus::modified_none(),
6901 ),
6902 ],
6903 );
6904 });
6905
6906 let staged_contents = r#"
6907 // print goodbye
6908 fn main() {
6909 }
6910 "#
6911 .unindent();
6912
6913 fs.set_index_for_repo(
6914 Path::new("/dir/.git"),
6915 &[("src/main.rs".into(), staged_contents)],
6916 );
6917
6918 cx.run_until_parked();
6919 unstaged_diff.update(cx, |unstaged_diff, cx| {
6920 let snapshot = buffer.read(cx).snapshot();
6921 assert_hunks(
6922 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
6923 &snapshot,
6924 &unstaged_diff.base_text().text(),
6925 &[(
6926 2..3,
6927 "",
6928 " println!(\"goodbye world\");\n",
6929 DiffHunkStatus::added_none(),
6930 )],
6931 );
6932 });
6933}
6934
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of `modification.rs`: HEAD, index, and the working copy
    // all differ from one another, so the uncommitted diff (buffer vs. HEAD)
    // has hunks with differing staged-ness.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not in the working tree.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), staged_contents),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (HEAD content) picks up the registered language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the comment line is an unstaged addition (it has a
    // secondary/unstaged hunk), while the println change was already staged
    // so it carries no secondary hunk.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs".into(), committed_contents.clone()),
            ("src/deletion.rs".into(), "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged, so the hunk has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file (the new index omits `deletion.rs`).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs".into(), committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk remains, but it no longer has a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7114
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and the index start out identical. The working copy deletes
    // "zero" and uppercases "two" and "four", yielding three unstaged hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's event stream so staging side effects can be
    // asserted in order below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It also appears as pending at first.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is reported as another diff change.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7454
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: deleting "zero" and uppercasing
    // "two"/"four" yields three unstaged hunks. Here the FS event delivery is
    // paused so staging operations overlap with delayed index notifications.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt".into(), committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. With events paused, it stays in the optimistic
    // SecondaryHunkRemovalPending state.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7648
#[gpui::test(iterations = 100)]
async fn test_staging_random_hunks(mut rng: StdRng, cx: &mut gpui::TestAppContext) {
    // Number of random stage/unstage toggles; override via the `OPERATIONS`
    // environment variable.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Thirty committed lines; every fifth line is modified in the working
    // copy, producing six separate hunks. The index starts equal to HEAD.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt".into(), index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model: each operation updates the local copy's
    // expected secondary status alongside the real diff.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        // Toggle a random hunk: stage it if it's unstaged, unstage otherwise.
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Interleave random scheduling delays so operations can overlap.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, every pending state should have resolved to its
    // corresponding settled state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text("file.txt".into()).await.unwrap()
    );

    // The real diff must agree with the model hunk-for-hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7758
7759#[gpui::test]
7760async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7761 init_test(cx);
7762
7763 let committed_contents = r#"
7764 fn main() {
7765 println!("hello from HEAD");
7766 }
7767 "#
7768 .unindent();
7769 let file_contents = r#"
7770 fn main() {
7771 println!("hello from the working copy");
7772 }
7773 "#
7774 .unindent();
7775
7776 let fs = FakeFs::new(cx.background_executor.clone());
7777 fs.insert_tree(
7778 "/dir",
7779 json!({
7780 ".git": {},
7781 "src": {
7782 "main.rs": file_contents,
7783 }
7784 }),
7785 )
7786 .await;
7787
7788 fs.set_head_for_repo(
7789 Path::new("/dir/.git"),
7790 &[("src/main.rs".into(), committed_contents.clone())],
7791 "deadbeef",
7792 );
7793 fs.set_index_for_repo(
7794 Path::new("/dir/.git"),
7795 &[("src/main.rs".into(), committed_contents.clone())],
7796 );
7797
7798 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7799
7800 let buffer = project
7801 .update(cx, |project, cx| {
7802 project.open_local_buffer("/dir/src/main.rs", cx)
7803 })
7804 .await
7805 .unwrap();
7806 let uncommitted_diff = project
7807 .update(cx, |project, cx| {
7808 project.open_uncommitted_diff(buffer.clone(), cx)
7809 })
7810 .await
7811 .unwrap();
7812
7813 cx.run_until_parked();
7814 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7815 let snapshot = buffer.read(cx).snapshot();
7816 assert_hunks(
7817 uncommitted_diff.hunks(&snapshot, cx),
7818 &snapshot,
7819 &uncommitted_diff.base_text_string().unwrap(),
7820 &[(
7821 1..2,
7822 " println!(\"hello from HEAD\");\n",
7823 " println!(\"hello from the working copy\");\n",
7824 DiffHunkStatus {
7825 kind: DiffHunkStatusKind::Modified,
7826 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7827 },
7828 )],
7829 );
7830 });
7831}
7832
7833#[gpui::test]
7834async fn test_repository_and_path_for_project_path(
7835 background_executor: BackgroundExecutor,
7836 cx: &mut gpui::TestAppContext,
7837) {
7838 init_test(cx);
7839 let fs = FakeFs::new(background_executor);
7840 fs.insert_tree(
7841 path!("/root"),
7842 json!({
7843 "c.txt": "",
7844 "dir1": {
7845 ".git": {},
7846 "deps": {
7847 "dep1": {
7848 ".git": {},
7849 "src": {
7850 "a.txt": ""
7851 }
7852 }
7853 },
7854 "src": {
7855 "b.txt": ""
7856 }
7857 },
7858 }),
7859 )
7860 .await;
7861
7862 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
7863 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7864 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7865 project
7866 .update(cx, |project, cx| project.git_scans_complete(cx))
7867 .await;
7868 cx.run_until_parked();
7869
7870 project.read_with(cx, |project, cx| {
7871 let git_store = project.git_store().read(cx);
7872 let pairs = [
7873 ("c.txt", None),
7874 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
7875 (
7876 "dir1/deps/dep1/src/a.txt",
7877 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
7878 ),
7879 ];
7880 let expected = pairs
7881 .iter()
7882 .map(|(path, result)| {
7883 (
7884 path,
7885 result.map(|(repo, repo_path)| {
7886 (Path::new(repo).into(), RepoPath::from(repo_path))
7887 }),
7888 )
7889 })
7890 .collect::<Vec<_>>();
7891 let actual = pairs
7892 .iter()
7893 .map(|(path, _)| {
7894 let project_path = (tree_id, Path::new(path)).into();
7895 let result = maybe!({
7896 let (repo, repo_path) =
7897 git_store.repository_and_path_for_project_path(&project_path, cx)?;
7898 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
7899 });
7900 (path, result)
7901 })
7902 .collect::<Vec<_>>();
7903 pretty_assertions::assert_eq!(expected, actual);
7904 });
7905
7906 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
7907 .await
7908 .unwrap();
7909 cx.run_until_parked();
7910
7911 project.read_with(cx, |project, cx| {
7912 let git_store = project.git_store().read(cx);
7913 assert_eq!(
7914 git_store.repository_and_path_for_project_path(
7915 &(tree_id, Path::new("dir1/src/b.txt")).into(),
7916 cx
7917 ),
7918 None
7919 );
7920 });
7921}
7922
7923#[gpui::test]
7924async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
7925 init_test(cx);
7926 let fs = FakeFs::new(cx.background_executor.clone());
7927 fs.insert_tree(
7928 path!("/root"),
7929 json!({
7930 "home": {
7931 ".git": {},
7932 "project": {
7933 "a.txt": "A"
7934 },
7935 },
7936 }),
7937 )
7938 .await;
7939 fs.set_home_dir(Path::new(path!("/root/home")).to_owned());
7940
7941 let project = Project::test(fs.clone(), [path!("/root/home/project").as_ref()], cx).await;
7942 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7943 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7944
7945 project
7946 .update(cx, |project, cx| project.git_scans_complete(cx))
7947 .await;
7948 tree.flush_fs_events(cx).await;
7949
7950 project.read_with(cx, |project, cx| {
7951 let containing = project
7952 .git_store()
7953 .read(cx)
7954 .repository_and_path_for_project_path(&(tree_id, "a.txt").into(), cx);
7955 assert!(containing.is_none());
7956 });
7957
7958 let project = Project::test(fs.clone(), [path!("/root/home").as_ref()], cx).await;
7959 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
7960 let tree_id = tree.read_with(cx, |tree, _| tree.id());
7961 project
7962 .update(cx, |project, cx| project.git_scans_complete(cx))
7963 .await;
7964 tree.flush_fs_events(cx).await;
7965
7966 project.read_with(cx, |project, cx| {
7967 let containing = project
7968 .git_store()
7969 .read(cx)
7970 .repository_and_path_for_project_path(&(tree_id, "project/a.txt").into(), cx);
7971 assert_eq!(
7972 containing
7973 .unwrap()
7974 .0
7975 .read(cx)
7976 .work_directory_abs_path
7977 .as_ref(),
7978 Path::new(path!("/root/home"))
7979 );
7980 });
7981}
7982
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // This test uses the real filesystem and a real git repository,
    // so blocking (parking) is allowed.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the statuses annotated above: delete d.txt, modify a.txt.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked, previously-unchanged file and confirm it shows up.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: "a.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "b.txt".into(),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: "c.txt".into(),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: "d.txt".into(),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion, clearing those statuses.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file and one untracked file.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8112
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    // Checks two pieces of status postprocessing:
    // - a file that is deleted in the index but still present in HEAD and the
    //   working copy is reported with a combined "DA" status
    //   (index: Deleted, worktree: Added);
    // - entries belonging to a nested git repository (`sub`) are excluded
    //   from the outer repository's status list.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both `project` and `project/sub` are discovered as repositories, so
    // select the outer one by its working-directory path rather than taking
    // the first entry.
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: "a.txt".into(),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8175
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // A worktree rooted deep inside a repository should still discover the
    // repository above it and report statuses for repo-relative paths, and
    // should pick up subsequent status changes from the git dir.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, not the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT.as_ref(), FileStatus::Untracked)],
    );

    // Open only the innermost subfolder as the project's worktree.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        // The repository's working directory is the repo root, which lies
        // outside the opened worktree.
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            FileStatus::Untracked
        );
    });

    // Clearing the simulated status should clear the cached entries as well.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&C_TXT.into()), None);
        assert_eq!(repository.status_for_path(&E_TXT.into()), None);
    });
}
8252
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())] // `cfg(any())` is never satisfied, so this test is currently compiled out.
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // A conflicted cherry-pick should surface the conflicted path in
    // `merge_conflicts`, and completing the cherry-pick (commit + removing
    // CHERRY_PICK_HEAD) should clear it again.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits to a.txt on two branches, then cherry-pick
    // one on top of the other to force a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git really is mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8335
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Rewriting `.gitignore` should flip the ignored flag on affected
    // entries, and staging a newly un-ignored file should give it an
    // index-Added status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore".into(), "*.txt\n".into()),
            ("a.xml".into(), "<a></a>".into()),
            ("b.txt".into(), "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // The ignored flags have swapped, and b.txt now shows as Added in the index.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8403
// NOTE:
// This test always fails on Windows because, unlike on Unix, Windows does not
// allow renaming a directory that some program already has open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Renaming a repository's working directory should update the
    // repository's `work_directory_abs_path` while preserving the statuses
    // of the files inside it.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&"a".into())
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&"b".into())
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole working directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository tracks the new path and the statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&"a".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&"b".into()).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8484
// NOTE: This test always fails on Windows because, unlike on Unix, Windows
// does not allow renaming a directory that some program already has open.
// This is a limitation of Windows. See:
// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check that the observed git statuses follow a real
    // repository through modifications, commits, resets, stashes, gitignore
    // edits, deletions, and directory renames.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so both start out untracked.
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.status_for_path(&A_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.status_for_path(&F_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        // The committed files no longer carry a status entry.
        assert_eq!(repository.status_for_path(&B_TXT.into()), None);
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's modification was stashed away, b.txt was un-staged, and
        // e.txt was modified in the working copy.
        assert_eq!(repository.status_for_path(&A_TXT.into()), None);
        assert_eq!(
            repository.status_for_path(&B_TXT.into()).unwrap().status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository.status_for_path(&E_TXT.into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules, then commit the new ignore file.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A new untracked file inside a freshly created nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory should carry the untracked status over
    // to the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8686
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Creating an invisible (non-user-visible) worktree for a file must not
    // add that file's containing repository to the project's repository list.
    init_test(cx);
    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repository of the visible worktree is present.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Open `b.txt` (which lives in the outer `dir1` repository) via an
    // invisible worktree.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer `dir1` repository is still not reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
8748
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Tracked, ancestor-ignored, and ignored files should keep the correct
    // ignored flags and statuses across rescans, including for files created
    // after the initial scan.
    init_test(cx);
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings::<WorktreeSettings>(cx, |project_settings| {
                // Scan everything so ignored entries are present to assert on.
                project_settings.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            // This gitignore lives above the repository root, so its rules
            // apply as "ancestor" ignores.
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![Path::new("ignored-dir").into()])
    })
    .recv()
    .await;

    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one file of each kind after the initial scan; stage only the
    // tracked one.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore".into(), "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1".into(), "".into()),
            ("tracked-dir/tracked-file2".into(), "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The `.git` directory itself is treated as ignored.
        assert!(tree.read(cx).entry_for_path(".git").unwrap().is_ignored);
    });
}
8884
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // A linked git worktree (a `.git` file pointing into `<repo>/.git/worktrees/`)
    // and a submodule (a `.git` file pointing into `<repo>/.git/modules/`) must
    // each be detected as their own repository, and git-state changes in their
    // git dirs must refresh the corresponding repository.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories are discovered: the main one, the linked
    // worktree, and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index both contain "b", while the file on disk
            // contains "B" — so b.txt is modified in the working copy.
            state
                .head_contents
                .insert("src/b.txt".into(), "b".to_owned());
            state
                .index_contents
                .insert("src/b.txt".into(), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        // The buffer resolves to the linked worktree's repository, not the
        // outer repository.
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"src/b.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state.head_contents.insert("c.txt".into(), "c".to_owned());
            state.index_contents.insert("c.txt".into(), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&"c.txt".into()).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9034
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two project worktrees that live inside the same git repository should
    // resolve to a single repository entry, not one per worktree.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repository as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9081
9082async fn search(
9083 project: &Entity<Project>,
9084 query: SearchQuery,
9085 cx: &mut gpui::TestAppContext,
9086) -> Result<HashMap<String, Vec<Range<usize>>>> {
9087 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9088 let mut results = HashMap::default();
9089 while let Ok(search_result) = search_rx.recv().await {
9090 match search_result {
9091 SearchResult::Buffer { buffer, ranges } => {
9092 results.entry(buffer).or_insert(ranges);
9093 }
9094 SearchResult::LimitReached => {}
9095 }
9096 }
9097 Ok(results
9098 .into_iter()
9099 .map(|(buffer, ranges)| {
9100 buffer.update(cx, |buffer, cx| {
9101 let path = buffer
9102 .file()
9103 .unwrap()
9104 .full_path(cx)
9105 .to_string_lossy()
9106 .to_string();
9107 let ranges = ranges
9108 .into_iter()
9109 .map(|range| range.to_offset(buffer))
9110 .collect::<Vec<_>>();
9111 (path, ranges)
9112 })
9113 })
9114 .collect())
9115}
9116
/// Common setup for every test in this file: initializes test logging and the
/// global state (settings store, release channel, language and project
/// settings) that the project machinery reads.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store is installed first so the subsequent `init`
        // calls can register their settings against it.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9128
9129fn json_lang() -> Arc<Language> {
9130 Arc::new(Language::new(
9131 LanguageConfig {
9132 name: "JSON".into(),
9133 matcher: LanguageMatcher {
9134 path_suffixes: vec!["json".to_string()],
9135 ..Default::default()
9136 },
9137 ..Default::default()
9138 },
9139 None,
9140 ))
9141}
9142
9143fn js_lang() -> Arc<Language> {
9144 Arc::new(Language::new(
9145 LanguageConfig {
9146 name: "JavaScript".into(),
9147 matcher: LanguageMatcher {
9148 path_suffixes: vec!["js".to_string()],
9149 ..Default::default()
9150 },
9151 ..Default::default()
9152 },
9153 None,
9154 ))
9155}
9156
9157fn rust_lang() -> Arc<Language> {
9158 Arc::new(Language::new(
9159 LanguageConfig {
9160 name: "Rust".into(),
9161 matcher: LanguageMatcher {
9162 path_suffixes: vec!["rs".to_string()],
9163 ..Default::default()
9164 },
9165 ..Default::default()
9166 },
9167 Some(tree_sitter_rust::LANGUAGE.into()),
9168 ))
9169}
9170
/// A fake Python language fixture (no parsing) whose toolchain lister reports
/// a "Python Venv" toolchain for every `.venv` directory found between the
/// worktree root and the queried subpath.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister that consults the fake filesystem instead of running
    // any real interpreter discovery.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<Path>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path
                .ancestors()
                .map(ToOwned::to_owned)
                .collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                // One candidate `.venv` per ancestor directory of the subpath.
                let venv_path = worktree_root.join(ancestor).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unsupported in this fake; tests only
        // exercise `list`.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No shell activation commands are needed for the fake toolchain.
        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9241
9242fn typescript_lang() -> Arc<Language> {
9243 Arc::new(Language::new(
9244 LanguageConfig {
9245 name: "TypeScript".into(),
9246 matcher: LanguageMatcher {
9247 path_suffixes: vec!["ts".to_string()],
9248 ..Default::default()
9249 },
9250 ..Default::default()
9251 },
9252 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9253 ))
9254}
9255
9256fn tsx_lang() -> Arc<Language> {
9257 Arc::new(Language::new(
9258 LanguageConfig {
9259 name: "tsx".into(),
9260 matcher: LanguageMatcher {
9261 path_suffixes: vec!["tsx".to_string()],
9262 ..Default::default()
9263 },
9264 ..Default::default()
9265 },
9266 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9267 ))
9268}
9269
9270fn get_all_tasks(
9271 project: &Entity<Project>,
9272 task_contexts: Arc<TaskContexts>,
9273 cx: &mut App,
9274) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9275 let new_tasks = project.update(cx, |project, cx| {
9276 project.task_store.update(cx, |task_store, cx| {
9277 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9278 this.used_and_current_resolved_tasks(task_contexts, cx)
9279 })
9280 })
9281 });
9282
9283 cx.background_spawn(async move {
9284 let (mut old, new) = new_tasks.await;
9285 old.extend(new);
9286 old
9287 })
9288}
9289
9290#[track_caller]
9291fn assert_entry_git_state(
9292 tree: &Worktree,
9293 repository: &Repository,
9294 path: &str,
9295 index_status: Option<StatusCode>,
9296 is_ignored: bool,
9297) {
9298 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9299 let entry = tree
9300 .entry_for_path(path)
9301 .unwrap_or_else(|| panic!("entry {path} not found"));
9302 let status = repository
9303 .status_for_path(&path.into())
9304 .map(|entry| entry.status);
9305 let expected = index_status.map(|index_status| {
9306 TrackedStatus {
9307 index_status,
9308 worktree_status: StatusCode::Unmodified,
9309 }
9310 .into()
9311 });
9312 assert_eq!(
9313 status, expected,
9314 "expected {path} to have git status: {expected:?}"
9315 );
9316 assert_eq!(
9317 entry.is_ignored, is_ignored,
9318 "expected {path} to have is_ignored: {is_ignored}"
9319 );
9320}
9321
9322#[track_caller]
9323fn git_init(path: &Path) -> git2::Repository {
9324 let mut init_opts = RepositoryInitOptions::new();
9325 init_opts.initial_head("main");
9326 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9327}
9328
9329#[track_caller]
9330fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9331 let path = path.as_ref();
9332 let mut index = repo.index().expect("Failed to get index");
9333 index.add_path(path).expect("Failed to add file");
9334 index.write().expect("Failed to write index");
9335}
9336
9337#[track_caller]
9338fn git_remove_index(path: &Path, repo: &git2::Repository) {
9339 let mut index = repo.index().expect("Failed to get index");
9340 index.remove_path(path).expect("Failed to add file");
9341 index.write().expect("Failed to write index");
9342}
9343
9344#[track_caller]
9345fn git_commit(msg: &'static str, repo: &git2::Repository) {
9346 use git2::Signature;
9347
9348 let signature = Signature::now("test", "test@zed.dev").unwrap();
9349 let oid = repo.index().unwrap().write_tree().unwrap();
9350 let tree = repo.find_tree(oid).unwrap();
9351 if let Ok(head) = repo.head() {
9352 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9353
9354 let parent_commit = parent_obj.as_commit().unwrap();
9355
9356 repo.commit(
9357 Some("HEAD"),
9358 &signature,
9359 &signature,
9360 msg,
9361 &tree,
9362 &[parent_commit],
9363 )
9364 .expect("Failed to commit with parent");
9365 } else {
9366 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9367 .expect("Failed to commit");
9368 }
9369}
9370
// Cherry-picks `commit` onto the current HEAD. Currently compiled out via
// `#[cfg(any())]` (an always-false cfg) — kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9376
9377#[track_caller]
9378fn git_stash(repo: &mut git2::Repository) {
9379 use git2::Signature;
9380
9381 let signature = Signature::now("test", "test@zed.dev").unwrap();
9382 repo.stash_save(&signature, "N/A", None)
9383 .expect("Failed to stash");
9384}
9385
9386#[track_caller]
9387fn git_reset(offset: usize, repo: &git2::Repository) {
9388 let head = repo.head().expect("Couldn't get repo head");
9389 let object = head.peel(git2::ObjectType::Commit).unwrap();
9390 let commit = object.as_commit().unwrap();
9391 let new_head = commit
9392 .parents()
9393 .inspect(|parnet| {
9394 parnet.message();
9395 })
9396 .nth(offset)
9397 .expect("Not enough history");
9398 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9399 .expect("Could not reset");
9400}
9401
// Creates branch `name` pointing at the current HEAD commit. Currently
// compiled out via `#[cfg(any())]` — kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the panic message previously said "Failed to commit" — a
    // copy-paste error; this call creates a branch, not a commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9412
// Points HEAD at the ref `name` and checks out its tree into the working
// directory. Currently compiled out via `#[cfg(any())]` — kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9419
// Snapshots the repository's git status as a map of path → status flags.
// Currently compiled out via `#[cfg(any())]` — kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
9429
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    // Two sibling directories under /root, each opened as its own worktree.
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id so assertions below
    // can check which worktree a resolved path landed in.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Absolute path to a file at the root of worktree 1.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file1.txt"));

        // Absolute path to a nested file in worktree 1.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(found_path.path.as_ref(), Path::new("subdir/file2.txt"));

        // Absolute path resolving into the second worktree.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(found_path.path.as_ref(), Path::new("file3.txt"));

        // A path inside a worktree resolves even if no file exists there yet
        // (e.g. for files about to be created).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}