1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event, git_store::StatusEntry, task_inventory::TaskContexts, task_store::TaskSettingsLocation,
5 *,
6};
7use async_trait::async_trait;
8use buffer_diff::{
9 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
10 DiffHunkStatusKind, assert_hunks,
11};
12use fs::FakeFs;
13use futures::{StreamExt, future};
14use git::{
15 GitHostingProviderRegistry,
16 repository::{RepoPath, repo_path},
17 status::{StatusCode, TrackedStatus},
18};
19use git2::RepositoryInitOptions;
20use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
21use itertools::Itertools;
22use language::{
23 Diagnostic, DiagnosticEntry, DiagnosticSet, DiagnosticSourceKind, DiskState, FakeLspAdapter,
24 LanguageConfig, LanguageMatcher, LanguageName, LineEnding, ManifestName, ManifestProvider,
25 ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList, ToolchainLister,
26 language_settings::{LanguageSettingsContent, language_settings},
27 tree_sitter_rust, tree_sitter_typescript,
28};
29use lsp::{
30 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
31 Uri, WillRenameFiles, notification::DidRenameFiles,
32};
33use parking_lot::Mutex;
34use paths::{config_dir, global_gitignore_path, tasks_file};
35use postage::stream::Stream as _;
36use pretty_assertions::{assert_eq, assert_matches};
37use rand::{Rng as _, rngs::StdRng};
38use serde_json::json;
39#[cfg(not(windows))]
40use std::os;
41use std::{env, mem, num::NonZeroU32, ops::Range, str::FromStr, sync::OnceLock, task::Poll};
42use task::{ResolvedTask, ShellKind, TaskContext};
43use unindent::Unindent as _;
44use util::{
45 TryFutureExt as _, assert_set_eq, maybe, path,
46 paths::PathMatcher,
47 rel_path::rel_path,
48 test::{TempTree, marked_text_offsets},
49 uri,
50};
51use worktree::WorktreeModelHandle as _;
52
53#[gpui::test]
54async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
55 cx.executor().allow_parking();
56
57 let (tx, mut rx) = futures::channel::mpsc::unbounded();
58 let _thread = std::thread::spawn(move || {
59 #[cfg(not(target_os = "windows"))]
60 std::fs::metadata("/tmp").unwrap();
61 #[cfg(target_os = "windows")]
62 std::fs::metadata("C:/Windows").unwrap();
63 std::thread::sleep(Duration::from_millis(1000));
64 tx.unbounded_send(1).unwrap();
65 });
66 rx.next().await.unwrap();
67}
68
69#[gpui::test]
70async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
71 cx.executor().allow_parking();
72
73 let io_task = smol::unblock(move || {
74 println!("sleeping on thread {:?}", std::thread::current().id());
75 std::thread::sleep(Duration::from_millis(10));
76 1
77 });
78
79 let task = cx.foreground_executor().spawn(async move {
80 io_task.await;
81 });
82
83 task.await;
84}
85
86#[cfg(not(windows))]
87#[gpui::test]
88async fn test_symlinks(cx: &mut gpui::TestAppContext) {
89 init_test(cx);
90 cx.executor().allow_parking();
91
92 let dir = TempTree::new(json!({
93 "root": {
94 "apple": "",
95 "banana": {
96 "carrot": {
97 "date": "",
98 "endive": "",
99 }
100 },
101 "fennel": {
102 "grape": "",
103 }
104 }
105 }));
106
107 let root_link_path = dir.path().join("root_link");
108 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
109 os::unix::fs::symlink(
110 dir.path().join("root/fennel"),
111 dir.path().join("root/finnochio"),
112 )
113 .unwrap();
114
115 let project = Project::test(
116 Arc::new(RealFs::new(None, cx.executor())),
117 [root_link_path.as_ref()],
118 cx,
119 )
120 .await;
121
122 project.update(cx, |project, cx| {
123 let tree = project.worktrees(cx).next().unwrap().read(cx);
124 assert_eq!(tree.file_count(), 5);
125 assert_eq!(
126 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
127 tree.entry_for_path(rel_path("finnochio/grape"))
128 .unwrap()
129 .inode
130 );
131 });
132}
133
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Layout: a root `.editorconfig` (root = true) plus `.zed/settings.json`,
    // and a nested `b/.editorconfig` that overrides the root config for *.rs.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
            indent_style = tab
            indent_size = 3
            end_of_line = lf
            insert_final_newline = true
            trim_trailing_whitespace = true
            max_line_length = 120
        [*.js]
            tab_width = 10
            max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
                indent_size = 2
                max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project reads from it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let worktree scanning and settings observers finish before querying.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative path,
        // loading the language for the file's extension first.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set for *.js, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", fall back to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
232
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Install the global hosting-provider registry and the built-in providers.
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom GitLab-style provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    // Give the settings observer time to process the project settings file.
    cx.executor().run_until_parked();

    // The provider declared in project settings is now registered globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clearing the settings file should unregister the custom provider.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
297
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Two `.zed` directories: one at the worktree root and one under `b/`,
    // each carrying its own settings.json and tasks.json. Nested settings
    // should override the outer ones for files beneath them.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the worktree scan and settings observers settle.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree's (default) task context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Per-file settings resolution: `a/a.rs` sees the root settings,
            // `b/b.rs` sees the nested `b/.zed` override.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks resolve; neither has been used yet.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the top-level task as recently scheduled, and register an extra
    // task through the global (user-level) tasks.json.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task")
    ;
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // Re-query: the recently scheduled task now sorts first, and the new
    // global task (with its env) appears last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
498
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // One worktree task whose command expands the $ZED_WORKTREE_ROOT variable.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With no active worktree context, there is no ZED_WORKTREE_ROOT value and
    // the task cannot be resolved at all.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the worktree context supplies WorktreeRoot, resolution succeeds and
    // the variable is substituted into the resolved command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
590
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a subproject root is the nearest ancestor
    // directory (within `depth`) that contains a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for the manifest.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects in one worktree, each with its own manifest and
    // virtual environment directory; the "ty" server is the only one enabled.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a should start a single "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    // The manifest provider roots project-b's toolchain search at its own
    // pyproject.toml, and exactly one toolchain should be discovered there.
    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // No toolchain has been activated for project-b yet.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After toolchain activation, project-b's buffer should be served by a
    // separate server instance with a new id.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
792
793#[gpui::test]
794async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
795 init_test(cx);
796
797 let fs = FakeFs::new(cx.executor());
798 fs.insert_tree(
799 path!("/dir"),
800 json!({
801 "test.rs": "const A: i32 = 1;",
802 "test2.rs": "",
803 "Cargo.toml": "a = 1",
804 "package.json": "{\"a\": 1}",
805 }),
806 )
807 .await;
808
809 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
810 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
811
812 let mut fake_rust_servers = language_registry.register_fake_lsp(
813 "Rust",
814 FakeLspAdapter {
815 name: "the-rust-language-server",
816 capabilities: lsp::ServerCapabilities {
817 completion_provider: Some(lsp::CompletionOptions {
818 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
819 ..Default::default()
820 }),
821 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
822 lsp::TextDocumentSyncOptions {
823 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
824 ..Default::default()
825 },
826 )),
827 ..Default::default()
828 },
829 ..Default::default()
830 },
831 );
832 let mut fake_json_servers = language_registry.register_fake_lsp(
833 "JSON",
834 FakeLspAdapter {
835 name: "the-json-language-server",
836 capabilities: lsp::ServerCapabilities {
837 completion_provider: Some(lsp::CompletionOptions {
838 trigger_characters: Some(vec![":".to_string()]),
839 ..Default::default()
840 }),
841 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
842 lsp::TextDocumentSyncOptions {
843 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
844 ..Default::default()
845 },
846 )),
847 ..Default::default()
848 },
849 ..Default::default()
850 },
851 );
852
853 // Open a buffer without an associated language server.
854 let (toml_buffer, _handle) = project
855 .update(cx, |project, cx| {
856 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
857 })
858 .await
859 .unwrap();
860
861 // Open a buffer with an associated language server before the language for it has been loaded.
862 let (rust_buffer, _handle2) = project
863 .update(cx, |project, cx| {
864 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
865 })
866 .await
867 .unwrap();
868 rust_buffer.update(cx, |buffer, _| {
869 assert_eq!(buffer.language().map(|l| l.name()), None);
870 });
871
872 // Now we add the languages to the project, and ensure they get assigned to all
873 // the relevant open buffers.
874 language_registry.add(json_lang());
875 language_registry.add(rust_lang());
876 cx.executor().run_until_parked();
877 rust_buffer.update(cx, |buffer, _| {
878 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
879 });
880
881 // A server is started up, and it is notified about Rust files.
882 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
883 assert_eq!(
884 fake_rust_server
885 .receive_notification::<lsp::notification::DidOpenTextDocument>()
886 .await
887 .text_document,
888 lsp::TextDocumentItem {
889 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
890 version: 0,
891 text: "const A: i32 = 1;".to_string(),
892 language_id: "rust".to_string(),
893 }
894 );
895
896 // The buffer is configured based on the language server's capabilities.
897 rust_buffer.update(cx, |buffer, _| {
898 assert_eq!(
899 buffer
900 .completion_triggers()
901 .iter()
902 .cloned()
903 .collect::<Vec<_>>(),
904 &[".".to_string(), "::".to_string()]
905 );
906 });
907 toml_buffer.update(cx, |buffer, _| {
908 assert!(buffer.completion_triggers().is_empty());
909 });
910
911 // Edit a buffer. The changes are reported to the language server.
912 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
913 assert_eq!(
914 fake_rust_server
915 .receive_notification::<lsp::notification::DidChangeTextDocument>()
916 .await
917 .text_document,
918 lsp::VersionedTextDocumentIdentifier::new(
919 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
920 1
921 )
922 );
923
924 // Open a third buffer with a different associated language server.
925 let (json_buffer, _json_handle) = project
926 .update(cx, |project, cx| {
927 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
928 })
929 .await
930 .unwrap();
931
932 // A json language server is started up and is only notified about the json buffer.
933 let mut fake_json_server = fake_json_servers.next().await.unwrap();
934 assert_eq!(
935 fake_json_server
936 .receive_notification::<lsp::notification::DidOpenTextDocument>()
937 .await
938 .text_document,
939 lsp::TextDocumentItem {
940 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
941 version: 0,
942 text: "{\"a\": 1}".to_string(),
943 language_id: "json".to_string(),
944 }
945 );
946
947 // This buffer is configured based on the second language server's
948 // capabilities.
949 json_buffer.update(cx, |buffer, _| {
950 assert_eq!(
951 buffer
952 .completion_triggers()
953 .iter()
954 .cloned()
955 .collect::<Vec<_>>(),
956 &[":".to_string()]
957 );
958 });
959
960 // When opening another buffer whose language server is already running,
961 // it is also configured based on the existing language server's capabilities.
962 let (rust_buffer2, _handle4) = project
963 .update(cx, |project, cx| {
964 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
965 })
966 .await
967 .unwrap();
968 rust_buffer2.update(cx, |buffer, _| {
969 assert_eq!(
970 buffer
971 .completion_triggers()
972 .iter()
973 .cloned()
974 .collect::<Vec<_>>(),
975 &[".".to_string(), "::".to_string()]
976 );
977 });
978
979 // Changes are reported only to servers matching the buffer's language.
980 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
981 rust_buffer2.update(cx, |buffer, cx| {
982 buffer.edit([(0..0, "let x = 1;")], None, cx)
983 });
984 assert_eq!(
985 fake_rust_server
986 .receive_notification::<lsp::notification::DidChangeTextDocument>()
987 .await
988 .text_document,
989 lsp::VersionedTextDocumentIdentifier::new(
990 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
991 1
992 )
993 );
994
995 // Save notifications are reported to all servers.
996 project
997 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
998 .await
999 .unwrap();
1000 assert_eq!(
1001 fake_rust_server
1002 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1003 .await
1004 .text_document,
1005 lsp::TextDocumentIdentifier::new(
1006 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1007 )
1008 );
1009 assert_eq!(
1010 fake_json_server
1011 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1012 .await
1013 .text_document,
1014 lsp::TextDocumentIdentifier::new(
1015 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1016 )
1017 );
1018
1019 // Renames are reported only to servers matching the buffer's language.
1020 fs.rename(
1021 Path::new(path!("/dir/test2.rs")),
1022 Path::new(path!("/dir/test3.rs")),
1023 Default::default(),
1024 )
1025 .await
1026 .unwrap();
1027 assert_eq!(
1028 fake_rust_server
1029 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1030 .await
1031 .text_document,
1032 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1033 );
1034 assert_eq!(
1035 fake_rust_server
1036 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1037 .await
1038 .text_document,
1039 lsp::TextDocumentItem {
1040 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1041 version: 0,
1042 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1043 language_id: "rust".to_string(),
1044 },
1045 );
1046
1047 rust_buffer2.update(cx, |buffer, cx| {
1048 buffer.update_diagnostics(
1049 LanguageServerId(0),
1050 DiagnosticSet::from_sorted_entries(
1051 vec![DiagnosticEntry {
1052 diagnostic: Default::default(),
1053 range: Anchor::MIN..Anchor::MAX,
1054 }],
1055 &buffer.snapshot(),
1056 ),
1057 cx,
1058 );
1059 assert_eq!(
1060 buffer
1061 .snapshot()
1062 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1063 .count(),
1064 1
1065 );
1066 });
1067
1068 // When the rename changes the extension of the file, the buffer gets closed on the old
1069 // language server and gets opened on the new one.
1070 fs.rename(
1071 Path::new(path!("/dir/test3.rs")),
1072 Path::new(path!("/dir/test3.json")),
1073 Default::default(),
1074 )
1075 .await
1076 .unwrap();
1077 assert_eq!(
1078 fake_rust_server
1079 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1080 .await
1081 .text_document,
1082 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1083 );
1084 assert_eq!(
1085 fake_json_server
1086 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1087 .await
1088 .text_document,
1089 lsp::TextDocumentItem {
1090 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1091 version: 0,
1092 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1093 language_id: "json".to_string(),
1094 },
1095 );
1096
1097 // We clear the diagnostics, since the language has changed.
1098 rust_buffer2.update(cx, |buffer, _| {
1099 assert_eq!(
1100 buffer
1101 .snapshot()
1102 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1103 .count(),
1104 0
1105 );
1106 });
1107
1108 // The renamed file's version resets after changing language server.
1109 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1110 assert_eq!(
1111 fake_json_server
1112 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1113 .await
1114 .text_document,
1115 lsp::VersionedTextDocumentIdentifier::new(
1116 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1117 1
1118 )
1119 );
1120
1121 // Restart language servers
1122 project.update(cx, |project, cx| {
1123 project.restart_language_servers_for_buffers(
1124 vec![rust_buffer.clone(), json_buffer.clone()],
1125 HashSet::default(),
1126 cx,
1127 );
1128 });
1129
1130 let mut rust_shutdown_requests = fake_rust_server
1131 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1132 let mut json_shutdown_requests = fake_json_server
1133 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1134 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1135
1136 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1137 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1138
1139 // Ensure rust document is reopened in new rust language server
1140 assert_eq!(
1141 fake_rust_server
1142 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1143 .await
1144 .text_document,
1145 lsp::TextDocumentItem {
1146 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1147 version: 0,
1148 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1149 language_id: "rust".to_string(),
1150 }
1151 );
1152
1153 // Ensure json documents are reopened in new json language server
1154 assert_set_eq!(
1155 [
1156 fake_json_server
1157 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1158 .await
1159 .text_document,
1160 fake_json_server
1161 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1162 .await
1163 .text_document,
1164 ],
1165 [
1166 lsp::TextDocumentItem {
1167 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1168 version: 0,
1169 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1170 language_id: "json".to_string(),
1171 },
1172 lsp::TextDocumentItem {
1173 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1174 version: 0,
1175 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1176 language_id: "json".to_string(),
1177 }
1178 ]
1179 );
1180
1181 // Close notifications are reported only to servers matching the buffer's language.
1182 cx.update(|_| drop(_json_handle));
1183 let close_message = lsp::DidCloseTextDocumentParams {
1184 text_document: lsp::TextDocumentIdentifier::new(
1185 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1186 ),
1187 };
1188 assert_eq!(
1189 fake_json_server
1190 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1191 .await,
1192 close_message,
1193 );
1194}
1195
1196#[gpui::test]
1197async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1198 init_test(cx);
1199
1200 let fs = FakeFs::new(cx.executor());
1201 fs.insert_tree(
1202 path!("/the-root"),
1203 json!({
1204 ".gitignore": "target\n",
1205 "Cargo.lock": "",
1206 "src": {
1207 "a.rs": "",
1208 "b.rs": "",
1209 },
1210 "target": {
1211 "x": {
1212 "out": {
1213 "x.rs": ""
1214 }
1215 },
1216 "y": {
1217 "out": {
1218 "y.rs": "",
1219 }
1220 },
1221 "z": {
1222 "out": {
1223 "z.rs": ""
1224 }
1225 }
1226 }
1227 }),
1228 )
1229 .await;
1230 fs.insert_tree(
1231 path!("/the-registry"),
1232 json!({
1233 "dep1": {
1234 "src": {
1235 "dep1.rs": "",
1236 }
1237 },
1238 "dep2": {
1239 "src": {
1240 "dep2.rs": "",
1241 }
1242 },
1243 }),
1244 )
1245 .await;
1246 fs.insert_tree(
1247 path!("/the/stdlib"),
1248 json!({
1249 "LICENSE": "",
1250 "src": {
1251 "string.rs": "",
1252 }
1253 }),
1254 )
1255 .await;
1256
1257 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1258 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1259 (project.languages().clone(), project.lsp_store())
1260 });
1261 language_registry.add(rust_lang());
1262 let mut fake_servers = language_registry.register_fake_lsp(
1263 "Rust",
1264 FakeLspAdapter {
1265 name: "the-language-server",
1266 ..Default::default()
1267 },
1268 );
1269
1270 cx.executor().run_until_parked();
1271
1272 // Start the language server by opening a buffer with a compatible file extension.
1273 project
1274 .update(cx, |project, cx| {
1275 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1276 })
1277 .await
1278 .unwrap();
1279
1280 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1281 project.update(cx, |project, cx| {
1282 let worktree = project.worktrees(cx).next().unwrap();
1283 assert_eq!(
1284 worktree
1285 .read(cx)
1286 .snapshot()
1287 .entries(true, 0)
1288 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1289 .collect::<Vec<_>>(),
1290 &[
1291 ("", false),
1292 (".gitignore", false),
1293 ("Cargo.lock", false),
1294 ("src", false),
1295 ("src/a.rs", false),
1296 ("src/b.rs", false),
1297 ("target", true),
1298 ]
1299 );
1300 });
1301
1302 let prev_read_dir_count = fs.read_dir_call_count();
1303
1304 let fake_server = fake_servers.next().await.unwrap();
1305 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1306 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1307 id
1308 });
1309
1310 // Simulate jumping to a definition in a dependency outside of the worktree.
1311 let _out_of_worktree_buffer = project
1312 .update(cx, |project, cx| {
1313 project.open_local_buffer_via_lsp(
1314 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1315 server_id,
1316 cx,
1317 )
1318 })
1319 .await
1320 .unwrap();
1321
1322 // Keep track of the FS events reported to the language server.
1323 let file_changes = Arc::new(Mutex::new(Vec::new()));
1324 fake_server
1325 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1326 registrations: vec![lsp::Registration {
1327 id: Default::default(),
1328 method: "workspace/didChangeWatchedFiles".to_string(),
1329 register_options: serde_json::to_value(
1330 lsp::DidChangeWatchedFilesRegistrationOptions {
1331 watchers: vec![
1332 lsp::FileSystemWatcher {
1333 glob_pattern: lsp::GlobPattern::String(
1334 path!("/the-root/Cargo.toml").to_string(),
1335 ),
1336 kind: None,
1337 },
1338 lsp::FileSystemWatcher {
1339 glob_pattern: lsp::GlobPattern::String(
1340 path!("/the-root/src/*.{rs,c}").to_string(),
1341 ),
1342 kind: None,
1343 },
1344 lsp::FileSystemWatcher {
1345 glob_pattern: lsp::GlobPattern::String(
1346 path!("/the-root/target/y/**/*.rs").to_string(),
1347 ),
1348 kind: None,
1349 },
1350 lsp::FileSystemWatcher {
1351 glob_pattern: lsp::GlobPattern::String(
1352 path!("/the/stdlib/src/**/*.rs").to_string(),
1353 ),
1354 kind: None,
1355 },
1356 lsp::FileSystemWatcher {
1357 glob_pattern: lsp::GlobPattern::String(
1358 path!("**/Cargo.lock").to_string(),
1359 ),
1360 kind: None,
1361 },
1362 ],
1363 },
1364 )
1365 .ok(),
1366 }],
1367 })
1368 .await
1369 .into_response()
1370 .unwrap();
1371 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1372 let file_changes = file_changes.clone();
1373 move |params, _| {
1374 let mut file_changes = file_changes.lock();
1375 file_changes.extend(params.changes);
1376 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1377 }
1378 });
1379
1380 cx.executor().run_until_parked();
1381 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1382 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 5);
1383
1384 let mut new_watched_paths = fs.watched_paths();
1385 new_watched_paths.retain(|path| {
1386 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1387 });
1388 assert_eq!(
1389 &new_watched_paths,
1390 &[
1391 Path::new(path!("/the-root")),
1392 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1393 Path::new(path!("/the/stdlib/src"))
1394 ]
1395 );
1396
1397 // Now the language server has asked us to watch an ignored directory path,
1398 // so we recursively load it.
1399 project.update(cx, |project, cx| {
1400 let worktree = project.visible_worktrees(cx).next().unwrap();
1401 assert_eq!(
1402 worktree
1403 .read(cx)
1404 .snapshot()
1405 .entries(true, 0)
1406 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1407 .collect::<Vec<_>>(),
1408 &[
1409 ("", false),
1410 (".gitignore", false),
1411 ("Cargo.lock", false),
1412 ("src", false),
1413 ("src/a.rs", false),
1414 ("src/b.rs", false),
1415 ("target", true),
1416 ("target/x", true),
1417 ("target/y", true),
1418 ("target/y/out", true),
1419 ("target/y/out/y.rs", true),
1420 ("target/z", true),
1421 ]
1422 );
1423 });
1424
1425 // Perform some file system mutations, two of which match the watched patterns,
1426 // and one of which does not.
1427 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1428 .await
1429 .unwrap();
1430 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1431 .await
1432 .unwrap();
1433 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1434 .await
1435 .unwrap();
1436 fs.create_file(
1437 path!("/the-root/target/x/out/x2.rs").as_ref(),
1438 Default::default(),
1439 )
1440 .await
1441 .unwrap();
1442 fs.create_file(
1443 path!("/the-root/target/y/out/y2.rs").as_ref(),
1444 Default::default(),
1445 )
1446 .await
1447 .unwrap();
1448 fs.save(
1449 path!("/the-root/Cargo.lock").as_ref(),
1450 &"".into(),
1451 Default::default(),
1452 )
1453 .await
1454 .unwrap();
1455 fs.save(
1456 path!("/the-stdlib/LICENSE").as_ref(),
1457 &"".into(),
1458 Default::default(),
1459 )
1460 .await
1461 .unwrap();
1462 fs.save(
1463 path!("/the/stdlib/src/string.rs").as_ref(),
1464 &"".into(),
1465 Default::default(),
1466 )
1467 .await
1468 .unwrap();
1469
1470 // The language server receives events for the FS mutations that match its watch patterns.
1471 cx.executor().run_until_parked();
1472 assert_eq!(
1473 &*file_changes.lock(),
1474 &[
1475 lsp::FileEvent {
1476 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1477 typ: lsp::FileChangeType::CHANGED,
1478 },
1479 lsp::FileEvent {
1480 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1481 typ: lsp::FileChangeType::DELETED,
1482 },
1483 lsp::FileEvent {
1484 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1485 typ: lsp::FileChangeType::CREATED,
1486 },
1487 lsp::FileEvent {
1488 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1489 typ: lsp::FileChangeType::CREATED,
1490 },
1491 lsp::FileEvent {
1492 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1493 typ: lsp::FileChangeType::CHANGED,
1494 },
1495 ]
1496 );
1497}
1498
1499#[gpui::test]
1500async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1501 init_test(cx);
1502
1503 let fs = FakeFs::new(cx.executor());
1504 fs.insert_tree(
1505 path!("/dir"),
1506 json!({
1507 "a.rs": "let a = 1;",
1508 "b.rs": "let b = 2;"
1509 }),
1510 )
1511 .await;
1512
1513 let project = Project::test(
1514 fs,
1515 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1516 cx,
1517 )
1518 .await;
1519 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1520
1521 let buffer_a = project
1522 .update(cx, |project, cx| {
1523 project.open_local_buffer(path!("/dir/a.rs"), cx)
1524 })
1525 .await
1526 .unwrap();
1527 let buffer_b = project
1528 .update(cx, |project, cx| {
1529 project.open_local_buffer(path!("/dir/b.rs"), cx)
1530 })
1531 .await
1532 .unwrap();
1533
1534 lsp_store.update(cx, |lsp_store, cx| {
1535 lsp_store
1536 .update_diagnostics(
1537 LanguageServerId(0),
1538 lsp::PublishDiagnosticsParams {
1539 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1540 version: None,
1541 diagnostics: vec![lsp::Diagnostic {
1542 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1543 severity: Some(lsp::DiagnosticSeverity::ERROR),
1544 message: "error 1".to_string(),
1545 ..Default::default()
1546 }],
1547 },
1548 None,
1549 DiagnosticSourceKind::Pushed,
1550 &[],
1551 cx,
1552 )
1553 .unwrap();
1554 lsp_store
1555 .update_diagnostics(
1556 LanguageServerId(0),
1557 lsp::PublishDiagnosticsParams {
1558 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1559 version: None,
1560 diagnostics: vec![lsp::Diagnostic {
1561 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1562 severity: Some(DiagnosticSeverity::WARNING),
1563 message: "error 2".to_string(),
1564 ..Default::default()
1565 }],
1566 },
1567 None,
1568 DiagnosticSourceKind::Pushed,
1569 &[],
1570 cx,
1571 )
1572 .unwrap();
1573 });
1574
1575 buffer_a.update(cx, |buffer, _| {
1576 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1577 assert_eq!(
1578 chunks
1579 .iter()
1580 .map(|(s, d)| (s.as_str(), *d))
1581 .collect::<Vec<_>>(),
1582 &[
1583 ("let ", None),
1584 ("a", Some(DiagnosticSeverity::ERROR)),
1585 (" = 1;", None),
1586 ]
1587 );
1588 });
1589 buffer_b.update(cx, |buffer, _| {
1590 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1591 assert_eq!(
1592 chunks
1593 .iter()
1594 .map(|(s, d)| (s.as_str(), *d))
1595 .collect::<Vec<_>>(),
1596 &[
1597 ("let ", None),
1598 ("b", Some(DiagnosticSeverity::WARNING)),
1599 (" = 2;", None),
1600 ]
1601 );
1602 });
1603}
1604
1605#[gpui::test]
1606async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1607 init_test(cx);
1608
1609 let fs = FakeFs::new(cx.executor());
1610 fs.insert_tree(
1611 path!("/root"),
1612 json!({
1613 "dir": {
1614 ".git": {
1615 "HEAD": "ref: refs/heads/main",
1616 },
1617 ".gitignore": "b.rs",
1618 "a.rs": "let a = 1;",
1619 "b.rs": "let b = 2;",
1620 },
1621 "other.rs": "let b = c;"
1622 }),
1623 )
1624 .await;
1625
1626 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1627 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1628 let (worktree, _) = project
1629 .update(cx, |project, cx| {
1630 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1631 })
1632 .await
1633 .unwrap();
1634 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1635
1636 let (worktree, _) = project
1637 .update(cx, |project, cx| {
1638 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1639 })
1640 .await
1641 .unwrap();
1642 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1643
1644 let server_id = LanguageServerId(0);
1645 lsp_store.update(cx, |lsp_store, cx| {
1646 lsp_store
1647 .update_diagnostics(
1648 server_id,
1649 lsp::PublishDiagnosticsParams {
1650 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1651 version: None,
1652 diagnostics: vec![lsp::Diagnostic {
1653 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1654 severity: Some(lsp::DiagnosticSeverity::ERROR),
1655 message: "unused variable 'b'".to_string(),
1656 ..Default::default()
1657 }],
1658 },
1659 None,
1660 DiagnosticSourceKind::Pushed,
1661 &[],
1662 cx,
1663 )
1664 .unwrap();
1665 lsp_store
1666 .update_diagnostics(
1667 server_id,
1668 lsp::PublishDiagnosticsParams {
1669 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1670 version: None,
1671 diagnostics: vec![lsp::Diagnostic {
1672 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1673 severity: Some(lsp::DiagnosticSeverity::ERROR),
1674 message: "unknown variable 'c'".to_string(),
1675 ..Default::default()
1676 }],
1677 },
1678 None,
1679 DiagnosticSourceKind::Pushed,
1680 &[],
1681 cx,
1682 )
1683 .unwrap();
1684 });
1685
1686 let main_ignored_buffer = project
1687 .update(cx, |project, cx| {
1688 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1689 })
1690 .await
1691 .unwrap();
1692 main_ignored_buffer.update(cx, |buffer, _| {
1693 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1694 assert_eq!(
1695 chunks
1696 .iter()
1697 .map(|(s, d)| (s.as_str(), *d))
1698 .collect::<Vec<_>>(),
1699 &[
1700 ("let ", None),
1701 ("b", Some(DiagnosticSeverity::ERROR)),
1702 (" = 2;", None),
1703 ],
1704 "Gigitnored buffers should still get in-buffer diagnostics",
1705 );
1706 });
1707 let other_buffer = project
1708 .update(cx, |project, cx| {
1709 project.open_buffer((other_worktree_id, rel_path("")), cx)
1710 })
1711 .await
1712 .unwrap();
1713 other_buffer.update(cx, |buffer, _| {
1714 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1715 assert_eq!(
1716 chunks
1717 .iter()
1718 .map(|(s, d)| (s.as_str(), *d))
1719 .collect::<Vec<_>>(),
1720 &[
1721 ("let b = ", None),
1722 ("c", Some(DiagnosticSeverity::ERROR)),
1723 (";", None),
1724 ],
1725 "Buffers from hidden projects should still get in-buffer diagnostics"
1726 );
1727 });
1728
1729 project.update(cx, |project, cx| {
1730 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1731 assert_eq!(
1732 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1733 vec![(
1734 ProjectPath {
1735 worktree_id: main_worktree_id,
1736 path: rel_path("b.rs").into(),
1737 },
1738 server_id,
1739 DiagnosticSummary {
1740 error_count: 1,
1741 warning_count: 0,
1742 }
1743 )]
1744 );
1745 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1746 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1747 });
1748}
1749
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Progress token the fake adapter reports as its disk-based-diagnostics
    // token; work under this token should emit DiskBasedDiagnostics* events.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // From here on, the test asserts the exact order of project events.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token maps to a
    // DiskBasedDiagnosticsStarted event (after the inlay-hint refresh).
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish one error for a file that is not even open yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards should surface the stored diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntry {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second identical empty publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1886
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Token marking the server's in-flight work as disk-based diagnostics.
    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The restart should first remove server 0, then add server 1; the exact
    // event sequence is asserted below.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the replacement server.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) should be reported as running diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1987
1988#[gpui::test]
1989async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
1990 init_test(cx);
1991
1992 let fs = FakeFs::new(cx.executor());
1993 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
1994
1995 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
1996
1997 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
1998 language_registry.add(rust_lang());
1999 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2000
2001 let (buffer, _) = project
2002 .update(cx, |project, cx| {
2003 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2004 })
2005 .await
2006 .unwrap();
2007
2008 // Publish diagnostics
2009 let fake_server = fake_servers.next().await.unwrap();
2010 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2011 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2012 version: None,
2013 diagnostics: vec![lsp::Diagnostic {
2014 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2015 severity: Some(lsp::DiagnosticSeverity::ERROR),
2016 message: "the message".to_string(),
2017 ..Default::default()
2018 }],
2019 });
2020
2021 cx.executor().run_until_parked();
2022 buffer.update(cx, |buffer, _| {
2023 assert_eq!(
2024 buffer
2025 .snapshot()
2026 .diagnostics_in_range::<_, usize>(0..1, false)
2027 .map(|entry| entry.diagnostic.message)
2028 .collect::<Vec<_>>(),
2029 ["the message".to_string()]
2030 );
2031 });
2032 project.update(cx, |project, cx| {
2033 assert_eq!(
2034 project.diagnostic_summary(false, cx),
2035 DiagnosticSummary {
2036 error_count: 1,
2037 warning_count: 0,
2038 }
2039 );
2040 });
2041
2042 project.update(cx, |project, cx| {
2043 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2044 });
2045
2046 // The diagnostics are cleared.
2047 cx.executor().run_until_parked();
2048 buffer.update(cx, |buffer, _| {
2049 assert_eq!(
2050 buffer
2051 .snapshot()
2052 .diagnostics_in_range::<_, usize>(0..1, false)
2053 .map(|entry| entry.diagnostic.message)
2054 .collect::<Vec<_>>(),
2055 Vec::<String>::new(),
2056 );
2057 });
2058 project.update(cx, |project, cx| {
2059 assert_eq!(
2060 project.diagnostic_summary(false, cx),
2061 DiagnosticSummary {
2062 error_count: 0,
2063 warning_count: 0,
2064 }
2065 );
2066 });
2067}
2068
2069#[gpui::test]
2070async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2071 init_test(cx);
2072
2073 let fs = FakeFs::new(cx.executor());
2074 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2075
2076 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2077 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2078
2079 language_registry.add(rust_lang());
2080 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2081
2082 let (buffer, _handle) = project
2083 .update(cx, |project, cx| {
2084 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2085 })
2086 .await
2087 .unwrap();
2088
2089 // Before restarting the server, report diagnostics with an unknown buffer version.
2090 let fake_server = fake_servers.next().await.unwrap();
2091 fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
2092 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2093 version: Some(10000),
2094 diagnostics: Vec::new(),
2095 });
2096 cx.executor().run_until_parked();
2097 project.update(cx, |project, cx| {
2098 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2099 });
2100
2101 let mut fake_server = fake_servers.next().await.unwrap();
2102 let notification = fake_server
2103 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2104 .await
2105 .text_document;
2106 assert_eq!(notification.version, 0);
2107}
2108
2109#[gpui::test]
2110async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2111 init_test(cx);
2112
2113 let progress_token = "the-progress-token";
2114
2115 let fs = FakeFs::new(cx.executor());
2116 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2117
2118 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2119
2120 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2121 language_registry.add(rust_lang());
2122 let mut fake_servers = language_registry.register_fake_lsp(
2123 "Rust",
2124 FakeLspAdapter {
2125 name: "the-language-server",
2126 disk_based_diagnostics_sources: vec!["disk".into()],
2127 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2128 ..Default::default()
2129 },
2130 );
2131
2132 let (buffer, _handle) = project
2133 .update(cx, |project, cx| {
2134 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2135 })
2136 .await
2137 .unwrap();
2138
2139 // Simulate diagnostics starting to update.
2140 let mut fake_server = fake_servers.next().await.unwrap();
2141 fake_server
2142 .start_progress_with(
2143 "another-token",
2144 lsp::WorkDoneProgressBegin {
2145 cancellable: Some(false),
2146 ..Default::default()
2147 },
2148 )
2149 .await;
2150 fake_server
2151 .start_progress_with(
2152 progress_token,
2153 lsp::WorkDoneProgressBegin {
2154 cancellable: Some(true),
2155 ..Default::default()
2156 },
2157 )
2158 .await;
2159 cx.executor().run_until_parked();
2160
2161 project.update(cx, |project, cx| {
2162 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2163 });
2164
2165 let cancel_notification = fake_server
2166 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2167 .await;
2168 assert_eq!(
2169 cancel_notification.token,
2170 NumberOrString::String(progress_token.into())
2171 );
2172}
2173
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two files with different languages, each backed by its own fake server.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening each buffer starts the corresponding server.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The disabled server is told to exit.
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A fresh Rust server instance starts and reopens the buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // Meanwhile the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2291
// Diagnostics are published by the server against a specific document version.
// This test verifies that they are translated through buffer edits made after
// that version, that overlapping diagnostics highlight correctly, and that a
// publish referencing an already-superseded version still anchors properly.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Diagnostics whose `source` is "disk" should be flagged
            // `is_disk_based` when they arrive.
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        // Deliberately stale: the buffer was edited after this version.
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Only rows 3..5 are queried, so the 'A' diagnostic (now on row 2)
        // is excluded; the rest have shifted down two rows to follow the edit.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range starting/ending mid-diagnostic yields partial highlighted chunks.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            // A warning whose range contains the error above.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the more severe one wins the chunk.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(&lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // 'A' landed after the `(x: usize)` rewrite on its row; 'BB' grew to
        // cover the `xxx` that was inserted inside it.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2583
2584#[gpui::test]
2585async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2586 init_test(cx);
2587
2588 let text = concat!(
2589 "let one = ;\n", //
2590 "let two = \n",
2591 "let three = 3;\n",
2592 );
2593
2594 let fs = FakeFs::new(cx.executor());
2595 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2596
2597 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2598 let buffer = project
2599 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2600 .await
2601 .unwrap();
2602
2603 project.update(cx, |project, cx| {
2604 project.lsp_store.update(cx, |lsp_store, cx| {
2605 lsp_store
2606 .update_diagnostic_entries(
2607 LanguageServerId(0),
2608 PathBuf::from("/dir/a.rs"),
2609 None,
2610 None,
2611 vec![
2612 DiagnosticEntry {
2613 range: Unclipped(PointUtf16::new(0, 10))
2614 ..Unclipped(PointUtf16::new(0, 10)),
2615 diagnostic: Diagnostic {
2616 severity: DiagnosticSeverity::ERROR,
2617 message: "syntax error 1".to_string(),
2618 source_kind: DiagnosticSourceKind::Pushed,
2619 ..Diagnostic::default()
2620 },
2621 },
2622 DiagnosticEntry {
2623 range: Unclipped(PointUtf16::new(1, 10))
2624 ..Unclipped(PointUtf16::new(1, 10)),
2625 diagnostic: Diagnostic {
2626 severity: DiagnosticSeverity::ERROR,
2627 message: "syntax error 2".to_string(),
2628 source_kind: DiagnosticSourceKind::Pushed,
2629 ..Diagnostic::default()
2630 },
2631 },
2632 ],
2633 cx,
2634 )
2635 .unwrap();
2636 })
2637 });
2638
2639 // An empty range is extended forward to include the following character.
2640 // At the end of a line, an empty range is extended backward to include
2641 // the preceding character.
2642 buffer.update(cx, |buffer, _| {
2643 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2644 assert_eq!(
2645 chunks
2646 .iter()
2647 .map(|(s, d)| (s.as_str(), *d))
2648 .collect::<Vec<_>>(),
2649 &[
2650 ("let one = ", None),
2651 (";", Some(DiagnosticSeverity::ERROR)),
2652 ("\nlet two =", None),
2653 (" ", Some(DiagnosticSeverity::ERROR)),
2654 ("\nlet three = 3;\n", None)
2655 ]
2656 );
2657 });
2658}
2659
2660#[gpui::test]
2661async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2662 init_test(cx);
2663
2664 let fs = FakeFs::new(cx.executor());
2665 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2666 .await;
2667
2668 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2669 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2670
2671 lsp_store.update(cx, |lsp_store, cx| {
2672 lsp_store
2673 .update_diagnostic_entries(
2674 LanguageServerId(0),
2675 Path::new("/dir/a.rs").to_owned(),
2676 None,
2677 None,
2678 vec![DiagnosticEntry {
2679 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2680 diagnostic: Diagnostic {
2681 severity: DiagnosticSeverity::ERROR,
2682 is_primary: true,
2683 message: "syntax error a1".to_string(),
2684 source_kind: DiagnosticSourceKind::Pushed,
2685 ..Diagnostic::default()
2686 },
2687 }],
2688 cx,
2689 )
2690 .unwrap();
2691 lsp_store
2692 .update_diagnostic_entries(
2693 LanguageServerId(1),
2694 Path::new("/dir/a.rs").to_owned(),
2695 None,
2696 None,
2697 vec![DiagnosticEntry {
2698 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2699 diagnostic: Diagnostic {
2700 severity: DiagnosticSeverity::ERROR,
2701 is_primary: true,
2702 message: "syntax error b1".to_string(),
2703 source_kind: DiagnosticSourceKind::Pushed,
2704 ..Diagnostic::default()
2705 },
2706 }],
2707 cx,
2708 )
2709 .unwrap();
2710
2711 assert_eq!(
2712 lsp_store.diagnostic_summary(false, cx),
2713 DiagnosticSummary {
2714 error_count: 2,
2715 warning_count: 0,
2716 }
2717 );
2718 });
2719}
2720
// When the server computes edits against an older document version, those
// edits must be transformed through the buffer changes made since that
// version, so they land on the text they were originally aimed at.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Remember the version the server will (pretend to) compute edits against.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP edits below use coordinates of the *original* document version;
    // passing `Some(lsp_document_version)` asks the store to transform them
    // through the edits made above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits should change the code the server
    // targeted while preserving all of the comments inserted in the meantime.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2875
// A server may express a tiny change as a huge whole-file diff (rust-analyzer
// does this for merge-imports). The conversion should diff the edits back
// down to a minimal set so unchanged text is left untouched.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The large diff collapses to just two minimal edits: rewrite the
        // first import list and delete the now-redundant second use line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2986
2987#[gpui::test]
2988async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
2989 cx: &mut gpui::TestAppContext,
2990) {
2991 init_test(cx);
2992
2993 let text = "Path()";
2994
2995 let fs = FakeFs::new(cx.executor());
2996 fs.insert_tree(
2997 path!("/dir"),
2998 json!({
2999 "a.rs": text
3000 }),
3001 )
3002 .await;
3003
3004 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3005 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3006 let buffer = project
3007 .update(cx, |project, cx| {
3008 project.open_local_buffer(path!("/dir/a.rs"), cx)
3009 })
3010 .await
3011 .unwrap();
3012
3013 // Simulate the language server sending us a pair of edits at the same location,
3014 // with an insertion following a replacement (which violates the LSP spec).
3015 let edits = lsp_store
3016 .update(cx, |lsp_store, cx| {
3017 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3018 &buffer,
3019 [
3020 lsp::TextEdit {
3021 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3022 new_text: "Path".into(),
3023 },
3024 lsp::TextEdit {
3025 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3026 new_text: "from path import Path\n\n\n".into(),
3027 },
3028 ],
3029 LanguageServerId(0),
3030 None,
3031 cx,
3032 )
3033 })
3034 .await
3035 .unwrap();
3036
3037 buffer.update(cx, |buffer, cx| {
3038 buffer.edit(edits, None, cx);
3039 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3040 });
3041}
3042
// Servers sometimes send unsorted edits with inverted or out-of-bounds
// ranges. Conversion must tolerate them: the test expects the inverted range
// to be applied as if normalized and the line-99 end to be clipped to the
// end of the buffer, with the result collapsing to a minimal edit set.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far past the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal outcome as the well-formed large-diff case.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3149
3150fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3151 buffer: &Buffer,
3152 range: Range<T>,
3153) -> Vec<(String, Option<DiagnosticSeverity>)> {
3154 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3155 for chunk in buffer.snapshot().chunks(range, true) {
3156 if chunks
3157 .last()
3158 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3159 {
3160 chunks.last_mut().unwrap().0.push_str(chunk.text);
3161 } else {
3162 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3163 }
3164 }
3165 chunks
3166}
3167
// Go-to-definition targeting a file outside the project should load that file
// into a new *invisible* worktree, reuse the existing language server, and
// release the extra worktree once the definition result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only `b.rs` is part of the project; `a.rs` lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The server responds with a location inside `a.rs` — outside the project.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // `a.rs` was pulled in as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: (absolute path, is_visible) for each of the project's worktrees.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3266
// When a completion item carries a `text_edit`, its range and new text must
// take precedence over both the item's `insert_text` and its `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions at the end of the buffer, after "fqn".
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with a single item whose text_edit replaces the trailing "fqn".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The text_edit's new text and range won out over insert_text and label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3349
// Completion items may omit their own `text_edit` and rely on the list-level
// default edit range (`CompletionList.itemDefaults.edit_range`, LSP 3.17).
// This test covers the two fallbacks used when that default is present:
//   1. `insert_text` supplies the new text when `text_edit` is None.
//   2. `label` supplies the new text when both `text_edit` and `insert_text`
//      are None.
// In both cases the replace range must come from the defaults.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request first; the handler below answers it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // Respond with a default edit range covering the trailing "fqn" and a
        // single item that only carries `insert_text`.
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` wins over `label`; the range comes from the defaults.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither text_edit nor insert_text, the label is the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3485
// When the server supplies neither a `text_edit` on the item nor a default
// edit range on the list, the replace range must be inferred locally from the
// text around the cursor:
//   1. With `insert_text`, the word fragment before the cursor ("fqn") is
//      replaced.
//   2. With only a `label`, the fragment is inferred from the label match
//      ("cmp" inside the string literal).
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the request; the handler installed below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers "fqn", the word fragment before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp", excluding the closing quote.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3591
// Servers may send completion text containing `\r` or `\r\n` line endings.
// The final assertion shows that this text is normalized to `\n`-only line
// endings before being surfaced as a completion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The insert_text mixes a bare `\r` and a `\r\n`.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both `\r` and `\r\n` were normalized to `\n`.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3659
// Exercises the full command-backed code-action round trip:
//   1. The server returns a code action with `data` but no edits.
//   2. Resolving it yields a `command` (still no edits).
//   3. Applying the action therefore executes the command, during which the
//      server pushes edits back via a `workspace/applyEdit` request.
//   4. Those edits must appear in the returned project transaction and be
//      undoable in the buffer.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Server advertises both code-action resolution and command execution.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Insert "X" at the start of a.ts via workspace/applyEdit.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3801
// Renaming a file into a path whose parent directories do not yet exist must
// create the whole directory hierarchy, preserve file contents, and remove the
// old entry. A second rename into an already-existing directory must also
// succeed without resurrecting earlier paths.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move into a three-level-deep directory that doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look the entry up again: the rename assigned it a new location.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3909
3910#[gpui::test(iterations = 10)]
3911async fn test_save_file(cx: &mut gpui::TestAppContext) {
3912 init_test(cx);
3913
3914 let fs = FakeFs::new(cx.executor());
3915 fs.insert_tree(
3916 path!("/dir"),
3917 json!({
3918 "file1": "the old contents",
3919 }),
3920 )
3921 .await;
3922
3923 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3924 let buffer = project
3925 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3926 .await
3927 .unwrap();
3928 buffer.update(cx, |buffer, cx| {
3929 assert_eq!(buffer.text(), "the old contents");
3930 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3931 });
3932
3933 project
3934 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3935 .await
3936 .unwrap();
3937
3938 let new_text = fs
3939 .load(Path::new(path!("/dir/file1")))
3940 .await
3941 .unwrap()
3942 .replace("\r\n", "\n");
3943 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3944}
3945
// Regression test for #24349: an untitled buffer has no language servers, but
// saving it with a `.rs` extension must detect the language, start the Rust
// server, and open the document on it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled (unsaved) buffer; no server can match it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving with a `.rs` path is what should trigger the server startup.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the Rust server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4025
4026#[gpui::test(iterations = 30)]
4027async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4028 init_test(cx);
4029
4030 let fs = FakeFs::new(cx.executor());
4031 fs.insert_tree(
4032 path!("/dir"),
4033 json!({
4034 "file1": "the original contents",
4035 }),
4036 )
4037 .await;
4038
4039 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4040 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4041 let buffer = project
4042 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4043 .await
4044 .unwrap();
4045
4046 // Simulate buffer diffs being slow, so that they don't complete before
4047 // the next file change occurs.
4048 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4049
4050 // Change the buffer's file on disk, and then wait for the file change
4051 // to be detected by the worktree, so that the buffer starts reloading.
4052 fs.save(
4053 path!("/dir/file1").as_ref(),
4054 &"the first contents".into(),
4055 Default::default(),
4056 )
4057 .await
4058 .unwrap();
4059 worktree.next_event(cx).await;
4060
4061 // Change the buffer's file again. Depending on the random seed, the
4062 // previous file change may still be in progress.
4063 fs.save(
4064 path!("/dir/file1").as_ref(),
4065 &"the second contents".into(),
4066 Default::default(),
4067 )
4068 .await
4069 .unwrap();
4070 worktree.next_event(cx).await;
4071
4072 cx.executor().run_until_parked();
4073 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4074 buffer.read_with(cx, |buffer, _| {
4075 assert_eq!(buffer.text(), on_disk_text);
4076 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4077 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4078 });
4079}
4080
4081#[gpui::test(iterations = 30)]
4082async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4083 init_test(cx);
4084
4085 let fs = FakeFs::new(cx.executor());
4086 fs.insert_tree(
4087 path!("/dir"),
4088 json!({
4089 "file1": "the original contents",
4090 }),
4091 )
4092 .await;
4093
4094 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4095 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4096 let buffer = project
4097 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4098 .await
4099 .unwrap();
4100
4101 // Simulate buffer diffs being slow, so that they don't complete before
4102 // the next file change occurs.
4103 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4104
4105 // Change the buffer's file on disk, and then wait for the file change
4106 // to be detected by the worktree, so that the buffer starts reloading.
4107 fs.save(
4108 path!("/dir/file1").as_ref(),
4109 &"the first contents".into(),
4110 Default::default(),
4111 )
4112 .await
4113 .unwrap();
4114 worktree.next_event(cx).await;
4115
4116 cx.executor()
4117 .spawn(cx.executor().simulate_random_delay())
4118 .await;
4119
4120 // Perform a noop edit, causing the buffer's version to increase.
4121 buffer.update(cx, |buffer, cx| {
4122 buffer.edit([(0..0, " ")], None, cx);
4123 buffer.undo(cx);
4124 });
4125
4126 cx.executor().run_until_parked();
4127 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4128 buffer.read_with(cx, |buffer, _| {
4129 let buffer_text = buffer.text();
4130 if buffer_text == on_disk_text {
4131 assert!(
4132 !buffer.is_dirty() && !buffer.has_conflict(),
4133 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4134 );
4135 }
4136 // If the file change occurred while the buffer was processing the first
4137 // change, the buffer will be in a conflicting state.
4138 else {
4139 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4140 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4141 }
4142 });
4143}
4144
4145#[gpui::test]
4146async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4147 init_test(cx);
4148
4149 let fs = FakeFs::new(cx.executor());
4150 fs.insert_tree(
4151 path!("/dir"),
4152 json!({
4153 "file1": "the old contents",
4154 }),
4155 )
4156 .await;
4157
4158 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4159 let buffer = project
4160 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4161 .await
4162 .unwrap();
4163 buffer.update(cx, |buffer, cx| {
4164 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4165 });
4166
4167 project
4168 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4169 .await
4170 .unwrap();
4171
4172 let new_text = fs
4173 .load(Path::new(path!("/dir/file1")))
4174 .await
4175 .unwrap()
4176 .replace("\r\n", "\n");
4177 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4178}
4179
// "Save as" on an untitled buffer must: write the file, clear dirty state,
// re-detect the language from the new extension (Plain Text -> Rust), and
// register the buffer under its new path so later opens dedupe to it.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // Untitled buffer: starts as Plain Text, becomes dirty after an edit.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    // After the save settles: clean state and language re-detected from ".rs".
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4233
// Uses a real filesystem to verify that renames/deletions observed by a
// rescan (a) preserve entry ids, (b) keep open buffers' paths and disk state
// in sync, and (c) replicate to a remote worktree via the streamed updates.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helper: open a buffer for a path relative to the temp tree root.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Helper: the worktree entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture the update stream so it can be replayed into the remote later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            1,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree reflects the new layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including a rename of a parent directory).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files; the deleted file's buffer is marked so.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4401
// Renaming a directory must preserve the entry ids of the directory and the
// files inside it, and must not dirty buffers open on those files.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: the worktree entry id for a path (panics if absent).
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the file keep their original entry ids.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4455
4456#[gpui::test]
4457async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4458 init_test(cx);
4459
4460 let fs = FakeFs::new(cx.executor());
4461 fs.insert_tree(
4462 "/dir",
4463 json!({
4464 "a.txt": "a-contents",
4465 "b.txt": "b-contents",
4466 }),
4467 )
4468 .await;
4469
4470 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4471
4472 // Spawn multiple tasks to open paths, repeating some paths.
4473 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4474 (
4475 p.open_local_buffer("/dir/a.txt", cx),
4476 p.open_local_buffer("/dir/b.txt", cx),
4477 p.open_local_buffer("/dir/a.txt", cx),
4478 )
4479 });
4480
4481 let buffer_a_1 = buffer_a_1.await.unwrap();
4482 let buffer_a_2 = buffer_a_2.await.unwrap();
4483 let buffer_b = buffer_b.await.unwrap();
4484 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4485 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4486
4487 // There is only one buffer per path.
4488 let buffer_a_id = buffer_a_1.entity_id();
4489 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4490
4491 // Open the same path again while it is still open.
4492 drop(buffer_a_1);
4493 let buffer_a_3 = project
4494 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4495 .await
4496 .unwrap();
4497
4498 // There's still only one buffer per path.
4499 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4500}
4501
4502#[gpui::test]
4503async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4504 init_test(cx);
4505
4506 let fs = FakeFs::new(cx.executor());
4507 fs.insert_tree(
4508 path!("/dir"),
4509 json!({
4510 "file1": "abc",
4511 "file2": "def",
4512 "file3": "ghi",
4513 }),
4514 )
4515 .await;
4516
4517 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4518
4519 let buffer1 = project
4520 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4521 .await
4522 .unwrap();
4523 let events = Arc::new(Mutex::new(Vec::new()));
4524
4525 // initially, the buffer isn't dirty.
4526 buffer1.update(cx, |buffer, cx| {
4527 cx.subscribe(&buffer1, {
4528 let events = events.clone();
4529 move |_, _, event, _| match event {
4530 BufferEvent::Operation { .. } => {}
4531 _ => events.lock().push(event.clone()),
4532 }
4533 })
4534 .detach();
4535
4536 assert!(!buffer.is_dirty());
4537 assert!(events.lock().is_empty());
4538
4539 buffer.edit([(1..2, "")], None, cx);
4540 });
4541
4542 // after the first edit, the buffer is dirty, and emits a dirtied event.
4543 buffer1.update(cx, |buffer, cx| {
4544 assert!(buffer.text() == "ac");
4545 assert!(buffer.is_dirty());
4546 assert_eq!(
4547 *events.lock(),
4548 &[
4549 language::BufferEvent::Edited,
4550 language::BufferEvent::DirtyChanged
4551 ]
4552 );
4553 events.lock().clear();
4554 buffer.did_save(
4555 buffer.version(),
4556 buffer.file().unwrap().disk_state().mtime(),
4557 cx,
4558 );
4559 });
4560
4561 // after saving, the buffer is not dirty, and emits a saved event.
4562 buffer1.update(cx, |buffer, cx| {
4563 assert!(!buffer.is_dirty());
4564 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4565 events.lock().clear();
4566
4567 buffer.edit([(1..1, "B")], None, cx);
4568 buffer.edit([(2..2, "D")], None, cx);
4569 });
4570
4571 // after editing again, the buffer is dirty, and emits another dirty event.
4572 buffer1.update(cx, |buffer, cx| {
4573 assert!(buffer.text() == "aBDc");
4574 assert!(buffer.is_dirty());
4575 assert_eq!(
4576 *events.lock(),
4577 &[
4578 language::BufferEvent::Edited,
4579 language::BufferEvent::DirtyChanged,
4580 language::BufferEvent::Edited,
4581 ],
4582 );
4583 events.lock().clear();
4584
4585 // After restoring the buffer to its previously-saved state,
4586 // the buffer is not considered dirty anymore.
4587 buffer.edit([(1..3, "")], None, cx);
4588 assert!(buffer.text() == "ac");
4589 assert!(!buffer.is_dirty());
4590 });
4591
4592 assert_eq!(
4593 *events.lock(),
4594 &[
4595 language::BufferEvent::Edited,
4596 language::BufferEvent::DirtyChanged
4597 ]
4598 );
4599
4600 // When a file is deleted, it is not considered dirty.
4601 let events = Arc::new(Mutex::new(Vec::new()));
4602 let buffer2 = project
4603 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4604 .await
4605 .unwrap();
4606 buffer2.update(cx, |_, cx| {
4607 cx.subscribe(&buffer2, {
4608 let events = events.clone();
4609 move |_, _, event, _| match event {
4610 BufferEvent::Operation { .. } => {}
4611 _ => events.lock().push(event.clone()),
4612 }
4613 })
4614 .detach();
4615 });
4616
4617 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4618 .await
4619 .unwrap();
4620 cx.executor().run_until_parked();
4621 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4622 assert_eq!(
4623 mem::take(&mut *events.lock()),
4624 &[language::BufferEvent::FileHandleChanged]
4625 );
4626
4627 // Buffer becomes dirty when edited.
4628 buffer2.update(cx, |buffer, cx| {
4629 buffer.edit([(2..3, "")], None, cx);
4630 assert_eq!(buffer.is_dirty(), true);
4631 });
4632 assert_eq!(
4633 mem::take(&mut *events.lock()),
4634 &[
4635 language::BufferEvent::Edited,
4636 language::BufferEvent::DirtyChanged
4637 ]
4638 );
4639
4640 // Buffer becomes clean again when all of its content is removed, because
4641 // the file was deleted.
4642 buffer2.update(cx, |buffer, cx| {
4643 buffer.edit([(0..2, "")], None, cx);
4644 assert_eq!(buffer.is_empty(), true);
4645 assert_eq!(buffer.is_dirty(), false);
4646 });
4647 assert_eq!(
4648 *events.lock(),
4649 &[
4650 language::BufferEvent::Edited,
4651 language::BufferEvent::DirtyChanged
4652 ]
4653 );
4654
4655 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4656 let events = Arc::new(Mutex::new(Vec::new()));
4657 let buffer3 = project
4658 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4659 .await
4660 .unwrap();
4661 buffer3.update(cx, |_, cx| {
4662 cx.subscribe(&buffer3, {
4663 let events = events.clone();
4664 move |_, _, event, _| match event {
4665 BufferEvent::Operation { .. } => {}
4666 _ => events.lock().push(event.clone()),
4667 }
4668 })
4669 .detach();
4670 });
4671
4672 buffer3.update(cx, |buffer, cx| {
4673 buffer.edit([(0..0, "x")], None, cx);
4674 });
4675 events.lock().clear();
4676 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4677 .await
4678 .unwrap();
4679 cx.executor().run_until_parked();
4680 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4681 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4682}
4683
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how an open buffer reacts to its backing file changing on disk:
    // a clean buffer reloads (preserving anchor positions across the diff),
    // while a dirty buffer keeps its edits and is flagged as conflicted.
    init_test(cx);

    // The ˇ markers record offsets used to create anchors below, so we can
    // check that anchors land at the corresponding positions after a reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor should have tracked the edit and now sit at the
        // corresponding marked offset in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4766
4767#[gpui::test]
4768async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4769 init_test(cx);
4770
4771 let fs = FakeFs::new(cx.executor());
4772 fs.insert_tree(
4773 path!("/dir"),
4774 json!({
4775 "file1": "a\nb\nc\n",
4776 "file2": "one\r\ntwo\r\nthree\r\n",
4777 }),
4778 )
4779 .await;
4780
4781 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4782 let buffer1 = project
4783 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4784 .await
4785 .unwrap();
4786 let buffer2 = project
4787 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4788 .await
4789 .unwrap();
4790
4791 buffer1.update(cx, |buffer, _| {
4792 assert_eq!(buffer.text(), "a\nb\nc\n");
4793 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4794 });
4795 buffer2.update(cx, |buffer, _| {
4796 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4797 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4798 });
4799
4800 // Change a file's line endings on disk from unix to windows. The buffer's
4801 // state updates correctly.
4802 fs.save(
4803 path!("/dir/file1").as_ref(),
4804 &"aaa\nb\nc\n".into(),
4805 LineEnding::Windows,
4806 )
4807 .await
4808 .unwrap();
4809 cx.executor().run_until_parked();
4810 buffer1.update(cx, |buffer, _| {
4811 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4812 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4813 });
4814
4815 // Save a file with windows line endings. The file is written correctly.
4816 buffer2.update(cx, |buffer, cx| {
4817 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4818 });
4819 project
4820 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4821 .await
4822 .unwrap();
4823 assert_eq!(
4824 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4825 "one\r\ntwo\r\nthree\r\nfour\r\n",
4826 );
4827}
4828
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that pushed LSP diagnostics sharing `related_information` links
    // are grouped: each group gets one primary entry plus its hint entries,
    // all carrying the same `group_id`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Two diagnostic groups: "error 1" (warning + one hint) and "error 2"
    // (error + two hints). Primaries and hints point at each other through
    // `related_information`.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries over the full buffer range: both groups interleaved in
    // position order, with group membership expressed via `group_id`.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0 is the "error 2" group: two hints plus the primary error.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1 is the "error 1" group: the primary warning plus its hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5088
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies that renaming a worktree entry sends the LSP file-operation
    // handshake to a server that registered for it: `workspace/willRenameFiles`
    // (whose returned edit is applied) followed by a `workspace/didRenameFiles`
    // notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in renames of .rs files and of any folder, so the
    // rename below is covered by the server's file-operation filters.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename before installing the willRename handler; the
    // request is answered once the handler below is registered.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server returns from willRenameFiles; the project is
    // expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once the server has answered willRenameFiles, so we can assert at
    // the end that the request actually happened.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must be notified via
    // didRenameFiles with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5224
5225#[gpui::test]
5226async fn test_rename(cx: &mut gpui::TestAppContext) {
5227 // hi
5228 init_test(cx);
5229
5230 let fs = FakeFs::new(cx.executor());
5231 fs.insert_tree(
5232 path!("/dir"),
5233 json!({
5234 "one.rs": "const ONE: usize = 1;",
5235 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
5236 }),
5237 )
5238 .await;
5239
5240 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5241
5242 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5243 language_registry.add(rust_lang());
5244 let mut fake_servers = language_registry.register_fake_lsp(
5245 "Rust",
5246 FakeLspAdapter {
5247 capabilities: lsp::ServerCapabilities {
5248 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
5249 prepare_provider: Some(true),
5250 work_done_progress_options: Default::default(),
5251 })),
5252 ..Default::default()
5253 },
5254 ..Default::default()
5255 },
5256 );
5257
5258 let (buffer, _handle) = project
5259 .update(cx, |project, cx| {
5260 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
5261 })
5262 .await
5263 .unwrap();
5264
5265 let fake_server = fake_servers.next().await.unwrap();
5266
5267 let response = project.update(cx, |project, cx| {
5268 project.prepare_rename(buffer.clone(), 7, cx)
5269 });
5270 fake_server
5271 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
5272 assert_eq!(
5273 params.text_document.uri.as_str(),
5274 uri!("file:///dir/one.rs")
5275 );
5276 assert_eq!(params.position, lsp::Position::new(0, 7));
5277 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
5278 lsp::Position::new(0, 6),
5279 lsp::Position::new(0, 9),
5280 ))))
5281 })
5282 .next()
5283 .await
5284 .unwrap();
5285 let response = response.await.unwrap();
5286 let PrepareRenameResponse::Success(range) = response else {
5287 panic!("{:?}", response);
5288 };
5289 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
5290 assert_eq!(range, 6..9);
5291
5292 let response = project.update(cx, |project, cx| {
5293 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
5294 });
5295 fake_server
5296 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
5297 assert_eq!(
5298 params.text_document_position.text_document.uri.as_str(),
5299 uri!("file:///dir/one.rs")
5300 );
5301 assert_eq!(
5302 params.text_document_position.position,
5303 lsp::Position::new(0, 7)
5304 );
5305 assert_eq!(params.new_name, "THREE");
5306 Ok(Some(lsp::WorkspaceEdit {
5307 changes: Some(
5308 [
5309 (
5310 lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
5311 vec![lsp::TextEdit::new(
5312 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
5313 "THREE".to_string(),
5314 )],
5315 ),
5316 (
5317 lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
5318 vec![
5319 lsp::TextEdit::new(
5320 lsp::Range::new(
5321 lsp::Position::new(0, 24),
5322 lsp::Position::new(0, 27),
5323 ),
5324 "THREE".to_string(),
5325 ),
5326 lsp::TextEdit::new(
5327 lsp::Range::new(
5328 lsp::Position::new(0, 35),
5329 lsp::Position::new(0, 38),
5330 ),
5331 "THREE".to_string(),
5332 ),
5333 ],
5334 ),
5335 ]
5336 .into_iter()
5337 .collect(),
5338 ),
5339 ..Default::default()
5340 }))
5341 })
5342 .next()
5343 .await
5344 .unwrap();
5345 let mut transaction = response.await.unwrap().0;
5346 assert_eq!(transaction.len(), 2);
5347 assert_eq!(
5348 transaction
5349 .remove_entry(&buffer)
5350 .unwrap()
5351 .0
5352 .update(cx, |buffer, _| buffer.text()),
5353 "const THREE: usize = 1;"
5354 );
5355 assert_eq!(
5356 transaction
5357 .into_keys()
5358 .next()
5359 .unwrap()
5360 .update(cx, |buffer, _| buffer.text()),
5361 "const TWO: usize = one::THREE + one::THREE;"
5362 );
5363}
5364
5365#[gpui::test]
5366async fn test_search(cx: &mut gpui::TestAppContext) {
5367 init_test(cx);
5368
5369 let fs = FakeFs::new(cx.executor());
5370 fs.insert_tree(
5371 path!("/dir"),
5372 json!({
5373 "one.rs": "const ONE: usize = 1;",
5374 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5375 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5376 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5377 }),
5378 )
5379 .await;
5380 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5381 assert_eq!(
5382 search(
5383 &project,
5384 SearchQuery::text(
5385 "TWO",
5386 false,
5387 true,
5388 false,
5389 Default::default(),
5390 Default::default(),
5391 false,
5392 None
5393 )
5394 .unwrap(),
5395 cx
5396 )
5397 .await
5398 .unwrap(),
5399 HashMap::from_iter([
5400 (path!("dir/two.rs").to_string(), vec![6..9]),
5401 (path!("dir/three.rs").to_string(), vec![37..40])
5402 ])
5403 );
5404
5405 let buffer_4 = project
5406 .update(cx, |project, cx| {
5407 project.open_local_buffer(path!("/dir/four.rs"), cx)
5408 })
5409 .await
5410 .unwrap();
5411 buffer_4.update(cx, |buffer, cx| {
5412 let text = "two::TWO";
5413 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5414 });
5415
5416 assert_eq!(
5417 search(
5418 &project,
5419 SearchQuery::text(
5420 "TWO",
5421 false,
5422 true,
5423 false,
5424 Default::default(),
5425 Default::default(),
5426 false,
5427 None,
5428 )
5429 .unwrap(),
5430 cx
5431 )
5432 .await
5433 .unwrap(),
5434 HashMap::from_iter([
5435 (path!("dir/two.rs").to_string(), vec![6..9]),
5436 (path!("dir/three.rs").to_string(), vec![37..40]),
5437 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5438 ])
5439 );
5440}
5441
5442#[gpui::test]
5443async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5444 init_test(cx);
5445
5446 let search_query = "file";
5447
5448 let fs = FakeFs::new(cx.executor());
5449 fs.insert_tree(
5450 path!("/dir"),
5451 json!({
5452 "one.rs": r#"// Rust file one"#,
5453 "one.ts": r#"// TypeScript file one"#,
5454 "two.rs": r#"// Rust file two"#,
5455 "two.ts": r#"// TypeScript file two"#,
5456 }),
5457 )
5458 .await;
5459 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5460
5461 assert!(
5462 search(
5463 &project,
5464 SearchQuery::text(
5465 search_query,
5466 false,
5467 true,
5468 false,
5469 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5470 Default::default(),
5471 false,
5472 None
5473 )
5474 .unwrap(),
5475 cx
5476 )
5477 .await
5478 .unwrap()
5479 .is_empty(),
5480 "If no inclusions match, no files should be returned"
5481 );
5482
5483 assert_eq!(
5484 search(
5485 &project,
5486 SearchQuery::text(
5487 search_query,
5488 false,
5489 true,
5490 false,
5491 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5492 Default::default(),
5493 false,
5494 None
5495 )
5496 .unwrap(),
5497 cx
5498 )
5499 .await
5500 .unwrap(),
5501 HashMap::from_iter([
5502 (path!("dir/one.rs").to_string(), vec![8..12]),
5503 (path!("dir/two.rs").to_string(), vec![8..12]),
5504 ]),
5505 "Rust only search should give only Rust files"
5506 );
5507
5508 assert_eq!(
5509 search(
5510 &project,
5511 SearchQuery::text(
5512 search_query,
5513 false,
5514 true,
5515 false,
5516 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5517 .unwrap(),
5518 Default::default(),
5519 false,
5520 None,
5521 )
5522 .unwrap(),
5523 cx
5524 )
5525 .await
5526 .unwrap(),
5527 HashMap::from_iter([
5528 (path!("dir/one.ts").to_string(), vec![14..18]),
5529 (path!("dir/two.ts").to_string(), vec![14..18]),
5530 ]),
5531 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5532 );
5533
5534 assert_eq!(
5535 search(
5536 &project,
5537 SearchQuery::text(
5538 search_query,
5539 false,
5540 true,
5541 false,
5542 PathMatcher::new(
5543 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5544 PathStyle::local()
5545 )
5546 .unwrap(),
5547 Default::default(),
5548 false,
5549 None,
5550 )
5551 .unwrap(),
5552 cx
5553 )
5554 .await
5555 .unwrap(),
5556 HashMap::from_iter([
5557 (path!("dir/two.ts").to_string(), vec![14..18]),
5558 (path!("dir/one.rs").to_string(), vec![8..12]),
5559 (path!("dir/one.ts").to_string(), vec![14..18]),
5560 (path!("dir/two.rs").to_string(), vec![8..12]),
5561 ]),
5562 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5563 );
5564}
5565
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Project-wide text search with only the `files_to_exclude` matcher set
    // (sixth argument to `SearchQuery::text`; the fifth, `Default::default()`,
    // is the inclusion matcher, left empty). Each case below varies only the
    // exclusion patterns.
    // NOTE(review): the three leading booleans appear to be regex/whole-word,
    // case-sensitivity, and include-ignored flags — confirm against the
    // `SearchQuery::text` signature.
    init_test(cx);

    // The query occurs in every file, so differences between cases come from
    // the exclusions alone. Result ranges are byte offsets of "file" in each
    // file's contents.
    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Case 1: an exclusion pattern that matches nothing — all files returned.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Case 2: excluding `*.rs` leaves only the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // Case 3: a matching exclusion (`*.ts`) combined with a non-matching one
    // (`*.odd`) — the non-matching pattern must not affect the outcome.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Case 4: excluding every extension present yields an empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5689
5690#[gpui::test]
5691async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5692 init_test(cx);
5693
5694 let search_query = "file";
5695
5696 let fs = FakeFs::new(cx.executor());
5697 fs.insert_tree(
5698 path!("/dir"),
5699 json!({
5700 "one.rs": r#"// Rust file one"#,
5701 "one.ts": r#"// TypeScript file one"#,
5702 "two.rs": r#"// Rust file two"#,
5703 "two.ts": r#"// TypeScript file two"#,
5704 }),
5705 )
5706 .await;
5707
5708 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5709 let path_style = PathStyle::local();
5710 let _buffer = project.update(cx, |project, cx| {
5711 project.create_local_buffer("file", None, false, cx)
5712 });
5713
5714 assert_eq!(
5715 search(
5716 &project,
5717 SearchQuery::text(
5718 search_query,
5719 false,
5720 true,
5721 false,
5722 Default::default(),
5723 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5724 false,
5725 None,
5726 )
5727 .unwrap(),
5728 cx
5729 )
5730 .await
5731 .unwrap(),
5732 HashMap::from_iter([
5733 (path!("dir/one.rs").to_string(), vec![8..12]),
5734 (path!("dir/one.ts").to_string(), vec![14..18]),
5735 (path!("dir/two.rs").to_string(), vec![8..12]),
5736 (path!("dir/two.ts").to_string(), vec![14..18]),
5737 ]),
5738 "If no exclusions match, all files should be returned"
5739 );
5740
5741 assert_eq!(
5742 search(
5743 &project,
5744 SearchQuery::text(
5745 search_query,
5746 false,
5747 true,
5748 false,
5749 Default::default(),
5750 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5751 false,
5752 None,
5753 )
5754 .unwrap(),
5755 cx
5756 )
5757 .await
5758 .unwrap(),
5759 HashMap::from_iter([
5760 (path!("dir/one.ts").to_string(), vec![14..18]),
5761 (path!("dir/two.ts").to_string(), vec![14..18]),
5762 ]),
5763 "Rust exclusion search should give only TypeScript files"
5764 );
5765
5766 assert_eq!(
5767 search(
5768 &project,
5769 SearchQuery::text(
5770 search_query,
5771 false,
5772 true,
5773 false,
5774 Default::default(),
5775 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5776 false,
5777 None,
5778 )
5779 .unwrap(),
5780 cx
5781 )
5782 .await
5783 .unwrap(),
5784 HashMap::from_iter([
5785 (path!("dir/one.rs").to_string(), vec![8..12]),
5786 (path!("dir/two.rs").to_string(), vec![8..12]),
5787 ]),
5788 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5789 );
5790
5791 assert!(
5792 search(
5793 &project,
5794 SearchQuery::text(
5795 search_query,
5796 false,
5797 true,
5798 false,
5799 Default::default(),
5800 PathMatcher::new(
5801 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5802 PathStyle::local(),
5803 )
5804 .unwrap(),
5805 false,
5806 None,
5807 )
5808 .unwrap(),
5809 cx
5810 )
5811 .await
5812 .unwrap()
5813 .is_empty(),
5814 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5815 );
5816}
5817
5818#[gpui::test]
5819async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5820 init_test(cx);
5821
5822 let search_query = "file";
5823
5824 let fs = FakeFs::new(cx.executor());
5825 fs.insert_tree(
5826 path!("/dir"),
5827 json!({
5828 "one.rs": r#"// Rust file one"#,
5829 "one.ts": r#"// TypeScript file one"#,
5830 "two.rs": r#"// Rust file two"#,
5831 "two.ts": r#"// TypeScript file two"#,
5832 }),
5833 )
5834 .await;
5835 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5836 assert!(
5837 search(
5838 &project,
5839 SearchQuery::text(
5840 search_query,
5841 false,
5842 true,
5843 false,
5844 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5845 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5846 false,
5847 None,
5848 )
5849 .unwrap(),
5850 cx
5851 )
5852 .await
5853 .unwrap()
5854 .is_empty(),
5855 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5856 );
5857
5858 assert!(
5859 search(
5860 &project,
5861 SearchQuery::text(
5862 search_query,
5863 false,
5864 true,
5865 false,
5866 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5867 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5868 false,
5869 None,
5870 )
5871 .unwrap(),
5872 cx
5873 )
5874 .await
5875 .unwrap()
5876 .is_empty(),
5877 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5878 );
5879
5880 assert!(
5881 search(
5882 &project,
5883 SearchQuery::text(
5884 search_query,
5885 false,
5886 true,
5887 false,
5888 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5889 .unwrap(),
5890 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5891 .unwrap(),
5892 false,
5893 None,
5894 )
5895 .unwrap(),
5896 cx
5897 )
5898 .await
5899 .unwrap()
5900 .is_empty(),
5901 "Non-matching inclusions and exclusions should not change that."
5902 );
5903
5904 assert_eq!(
5905 search(
5906 &project,
5907 SearchQuery::text(
5908 search_query,
5909 false,
5910 true,
5911 false,
5912 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5913 .unwrap(),
5914 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
5915 .unwrap(),
5916 false,
5917 None,
5918 )
5919 .unwrap(),
5920 cx
5921 )
5922 .await
5923 .unwrap(),
5924 HashMap::from_iter([
5925 (path!("dir/one.ts").to_string(), vec![14..18]),
5926 (path!("dir/two.ts").to_string(), vec![14..18]),
5927 ]),
5928 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5929 );
5930}
5931
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Two worktrees with identical contents. Inclusion patterns prefixed with
    // a worktree name ("worktree-a/*.rs") should scope results to that
    // worktree; unprefixed patterns ("*.ts") should match in both.
    // "NEEDLE" occupies bytes 3..9 of each haystack file.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // NOTE(review): the `true`/`false` flag after the exclusion matcher
    // appears to control whether patterns are matched against
    // worktree-qualified paths — the first two queries use worktree-prefixed
    // patterns with `true`, the last uses a bare pattern with `false`.
    // Confirm against the `SearchQuery::text` signature.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A pattern without a worktree prefix should match files in every
    // worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6030
6031#[gpui::test]
6032async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6033 init_test(cx);
6034
6035 let fs = FakeFs::new(cx.background_executor.clone());
6036 fs.insert_tree(
6037 path!("/dir"),
6038 json!({
6039 ".git": {},
6040 ".gitignore": "**/target\n/node_modules\n",
6041 "target": {
6042 "index.txt": "index_key:index_value"
6043 },
6044 "node_modules": {
6045 "eslint": {
6046 "index.ts": "const eslint_key = 'eslint value'",
6047 "package.json": r#"{ "some_key": "some value" }"#,
6048 },
6049 "prettier": {
6050 "index.ts": "const prettier_key = 'prettier value'",
6051 "package.json": r#"{ "other_key": "other value" }"#,
6052 },
6053 },
6054 "package.json": r#"{ "main_key": "main value" }"#,
6055 }),
6056 )
6057 .await;
6058 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6059
6060 let query = "key";
6061 assert_eq!(
6062 search(
6063 &project,
6064 SearchQuery::text(
6065 query,
6066 false,
6067 false,
6068 false,
6069 Default::default(),
6070 Default::default(),
6071 false,
6072 None,
6073 )
6074 .unwrap(),
6075 cx
6076 )
6077 .await
6078 .unwrap(),
6079 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6080 "Only one non-ignored file should have the query"
6081 );
6082
6083 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6084 let path_style = PathStyle::local();
6085 assert_eq!(
6086 search(
6087 &project,
6088 SearchQuery::text(
6089 query,
6090 false,
6091 false,
6092 true,
6093 Default::default(),
6094 Default::default(),
6095 false,
6096 None,
6097 )
6098 .unwrap(),
6099 cx
6100 )
6101 .await
6102 .unwrap(),
6103 HashMap::from_iter([
6104 (path!("dir/package.json").to_string(), vec![8..11]),
6105 (path!("dir/target/index.txt").to_string(), vec![6..9]),
6106 (
6107 path!("dir/node_modules/prettier/package.json").to_string(),
6108 vec![9..12]
6109 ),
6110 (
6111 path!("dir/node_modules/prettier/index.ts").to_string(),
6112 vec![15..18]
6113 ),
6114 (
6115 path!("dir/node_modules/eslint/index.ts").to_string(),
6116 vec![13..16]
6117 ),
6118 (
6119 path!("dir/node_modules/eslint/package.json").to_string(),
6120 vec![8..11]
6121 ),
6122 ]),
6123 "Unrestricted search with ignored directories should find every file with the query"
6124 );
6125
6126 let files_to_include =
6127 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
6128 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
6129 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6130 assert_eq!(
6131 search(
6132 &project,
6133 SearchQuery::text(
6134 query,
6135 false,
6136 false,
6137 true,
6138 files_to_include,
6139 files_to_exclude,
6140 false,
6141 None,
6142 )
6143 .unwrap(),
6144 cx
6145 )
6146 .await
6147 .unwrap(),
6148 HashMap::from_iter([(
6149 path!("dir/node_modules/prettier/package.json").to_string(),
6150 vec![9..12]
6151 )]),
6152 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6153 );
6154}
6155
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Search with non-ASCII (Cyrillic) text. Each Cyrillic letter is 2 bytes
    // in UTF-8, so "привет" spans 12 bytes; the ranges asserted below are
    // byte offsets, not character offsets.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive query: stays a plain text query (asserted below) and
    // matches only the lowercase occurrences.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            // one.rs: "// " (3 bytes) + "ПРИВЕТ? " (14 bytes) precede the
            // lowercase match at 17..29.
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Case-insensitive query over the same non-ASCII text: the assert_matches
    // below shows it is represented as a Regex query variant, and it matches
    // both cases.
    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Query containing a '.' — matched literally (only two.rs ends the word
    // with an actual period; range is 12 Cyrillic bytes + 1 for the dot).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6238
6239#[gpui::test]
6240async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6241 init_test(cx);
6242
6243 let fs = FakeFs::new(cx.executor());
6244 fs.insert_tree(
6245 "/one/two",
6246 json!({
6247 "three": {
6248 "a.txt": "",
6249 "four": {}
6250 },
6251 "c.rs": ""
6252 }),
6253 )
6254 .await;
6255
6256 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6257 project
6258 .update(cx, |project, cx| {
6259 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6260 project.create_entry((id, rel_path("b..")), true, cx)
6261 })
6262 .await
6263 .unwrap()
6264 .into_included()
6265 .unwrap();
6266
6267 assert_eq!(
6268 fs.paths(true),
6269 vec![
6270 PathBuf::from(path!("/")),
6271 PathBuf::from(path!("/one")),
6272 PathBuf::from(path!("/one/two")),
6273 PathBuf::from(path!("/one/two/c.rs")),
6274 PathBuf::from(path!("/one/two/three")),
6275 PathBuf::from(path!("/one/two/three/a.txt")),
6276 PathBuf::from(path!("/one/two/three/b..")),
6277 PathBuf::from(path!("/one/two/three/four")),
6278 ]
6279 );
6280}
6281
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    // Four fake language servers attach to the same tsx buffer. A hover
    // request should fan out only to the servers that advertise hover
    // capability; of those, the two returning content contribute to the final
    // result, while a `None` response is dropped and the capability-less
    // server is never queried at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Register the four fakes; the first three advertise hover capability,
    // the last does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all four servers; run the executor until the
    // startup work settles before collecting them.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Collect each started server and install a hover handler appropriate to
    // its role, keyed by server name so we can await them all later.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two respond with a named hover so the final assertion can
            // attribute each response to its server.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // Responds with no hover content; must be queried but contribute
            // nothing.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // Declared no hover capability, so this handler must never fire.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover BEFORE awaiting the handlers: each handler stream
    // yields once the project fans the request out to that server.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Sorted so the assertion is independent of server response order.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6436
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // A server that answers a hover request with only whitespace-only
    // MarkedStrings should produce no hover blocks at all.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the server; let startup settle before
    // grabbing the fake server instance.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with three parts that are all empty or pure whitespace.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Start the hover, then await the handler stream to confirm the request
    // actually reached the server.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6510
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // The server offers two code actions of different kinds; requesting
    // actions restricted to SOURCE_ORGANIZE_IMPORTS should yield exactly the
    // matching one.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the server; let startup settle before
    // grabbing the fake server instance.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server always answers with both an organize-imports and a fix-all
    // action, regardless of the requested kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request actions over the whole buffer, restricted to one kind.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the action matching the requested kind should survive.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6589
6590#[gpui::test]
6591async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6592 init_test(cx);
6593
6594 let fs = FakeFs::new(cx.executor());
6595 fs.insert_tree(
6596 path!("/dir"),
6597 json!({
6598 "a.tsx": "a",
6599 }),
6600 )
6601 .await;
6602
6603 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6604
6605 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6606 language_registry.add(tsx_lang());
6607 let language_server_names = [
6608 "TypeScriptServer",
6609 "TailwindServer",
6610 "ESLintServer",
6611 "NoActionsCapabilitiesServer",
6612 ];
6613
6614 let mut language_server_rxs = [
6615 language_registry.register_fake_lsp(
6616 "tsx",
6617 FakeLspAdapter {
6618 name: language_server_names[0],
6619 capabilities: lsp::ServerCapabilities {
6620 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6621 ..lsp::ServerCapabilities::default()
6622 },
6623 ..FakeLspAdapter::default()
6624 },
6625 ),
6626 language_registry.register_fake_lsp(
6627 "tsx",
6628 FakeLspAdapter {
6629 name: language_server_names[1],
6630 capabilities: lsp::ServerCapabilities {
6631 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6632 ..lsp::ServerCapabilities::default()
6633 },
6634 ..FakeLspAdapter::default()
6635 },
6636 ),
6637 language_registry.register_fake_lsp(
6638 "tsx",
6639 FakeLspAdapter {
6640 name: language_server_names[2],
6641 capabilities: lsp::ServerCapabilities {
6642 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6643 ..lsp::ServerCapabilities::default()
6644 },
6645 ..FakeLspAdapter::default()
6646 },
6647 ),
6648 language_registry.register_fake_lsp(
6649 "tsx",
6650 FakeLspAdapter {
6651 name: language_server_names[3],
6652 capabilities: lsp::ServerCapabilities {
6653 code_action_provider: None,
6654 ..lsp::ServerCapabilities::default()
6655 },
6656 ..FakeLspAdapter::default()
6657 },
6658 ),
6659 ];
6660
6661 let (buffer, _handle) = project
6662 .update(cx, |p, cx| {
6663 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6664 })
6665 .await
6666 .unwrap();
6667 cx.executor().run_until_parked();
6668
6669 let mut servers_with_actions_requests = HashMap::default();
6670 for i in 0..language_server_names.len() {
6671 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6672 panic!(
6673 "Failed to get language server #{i} with name {}",
6674 &language_server_names[i]
6675 )
6676 });
6677 let new_server_name = new_server.server.name();
6678
6679 assert!(
6680 !servers_with_actions_requests.contains_key(&new_server_name),
6681 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6682 );
6683 match new_server_name.0.as_ref() {
6684 "TailwindServer" | "TypeScriptServer" => {
6685 servers_with_actions_requests.insert(
6686 new_server_name.clone(),
6687 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6688 move |_, _| {
6689 let name = new_server_name.clone();
6690 async move {
6691 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6692 lsp::CodeAction {
6693 title: format!("{name} code action"),
6694 ..lsp::CodeAction::default()
6695 },
6696 )]))
6697 }
6698 },
6699 ),
6700 );
6701 }
6702 "ESLintServer" => {
6703 servers_with_actions_requests.insert(
6704 new_server_name,
6705 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6706 |_, _| async move { Ok(None) },
6707 ),
6708 );
6709 }
6710 "NoActionsCapabilitiesServer" => {
6711 let _never_handled = new_server
6712 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6713 panic!(
6714 "Should not call for code actions server with no corresponding capabilities"
6715 )
6716 });
6717 }
6718 unexpected => panic!("Unexpected server name: {unexpected}"),
6719 }
6720 }
6721
6722 let code_actions_task = project.update(cx, |project, cx| {
6723 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6724 });
6725
6726 // cx.run_until_parked();
6727 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6728 |mut code_actions_request| async move {
6729 code_actions_request
6730 .next()
6731 .await
6732 .expect("All code actions requests should have been triggered")
6733 },
6734 ))
6735 .await;
6736 assert_eq!(
6737 vec!["TailwindServer code action", "TypeScriptServer code action"],
6738 code_actions_task
6739 .await
6740 .unwrap()
6741 .unwrap()
6742 .into_iter()
6743 .map(|code_action| code_action.lsp_action.title().to_owned())
6744 .sorted()
6745 .collect::<Vec<_>>(),
6746 "Should receive code actions responses from all related servers with hover capabilities"
6747 );
6748}
6749
6750#[gpui::test]
6751async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6752 init_test(cx);
6753
6754 let fs = FakeFs::new(cx.executor());
6755 fs.insert_tree(
6756 "/dir",
6757 json!({
6758 "a.rs": "let a = 1;",
6759 "b.rs": "let b = 2;",
6760 "c.rs": "let c = 2;",
6761 }),
6762 )
6763 .await;
6764
6765 let project = Project::test(
6766 fs,
6767 [
6768 "/dir/a.rs".as_ref(),
6769 "/dir/b.rs".as_ref(),
6770 "/dir/c.rs".as_ref(),
6771 ],
6772 cx,
6773 )
6774 .await;
6775
6776 // check the initial state and get the worktrees
6777 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6778 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6779 assert_eq!(worktrees.len(), 3);
6780
6781 let worktree_a = worktrees[0].read(cx);
6782 let worktree_b = worktrees[1].read(cx);
6783 let worktree_c = worktrees[2].read(cx);
6784
6785 // check they start in the right order
6786 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6787 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6788 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6789
6790 (
6791 worktrees[0].clone(),
6792 worktrees[1].clone(),
6793 worktrees[2].clone(),
6794 )
6795 });
6796
6797 // move first worktree to after the second
6798 // [a, b, c] -> [b, a, c]
6799 project
6800 .update(cx, |project, cx| {
6801 let first = worktree_a.read(cx);
6802 let second = worktree_b.read(cx);
6803 project.move_worktree(first.id(), second.id(), cx)
6804 })
6805 .expect("moving first after second");
6806
6807 // check the state after moving
6808 project.update(cx, |project, cx| {
6809 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6810 assert_eq!(worktrees.len(), 3);
6811
6812 let first = worktrees[0].read(cx);
6813 let second = worktrees[1].read(cx);
6814 let third = worktrees[2].read(cx);
6815
6816 // check they are now in the right order
6817 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6818 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6819 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6820 });
6821
6822 // move the second worktree to before the first
6823 // [b, a, c] -> [a, b, c]
6824 project
6825 .update(cx, |project, cx| {
6826 let second = worktree_a.read(cx);
6827 let first = worktree_b.read(cx);
6828 project.move_worktree(first.id(), second.id(), cx)
6829 })
6830 .expect("moving second before first");
6831
6832 // check the state after moving
6833 project.update(cx, |project, cx| {
6834 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6835 assert_eq!(worktrees.len(), 3);
6836
6837 let first = worktrees[0].read(cx);
6838 let second = worktrees[1].read(cx);
6839 let third = worktrees[2].read(cx);
6840
6841 // check they are now in the right order
6842 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6843 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6844 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6845 });
6846
6847 // move the second worktree to after the third
6848 // [a, b, c] -> [a, c, b]
6849 project
6850 .update(cx, |project, cx| {
6851 let second = worktree_b.read(cx);
6852 let third = worktree_c.read(cx);
6853 project.move_worktree(second.id(), third.id(), cx)
6854 })
6855 .expect("moving second after third");
6856
6857 // check the state after moving
6858 project.update(cx, |project, cx| {
6859 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6860 assert_eq!(worktrees.len(), 3);
6861
6862 let first = worktrees[0].read(cx);
6863 let second = worktrees[1].read(cx);
6864 let third = worktrees[2].read(cx);
6865
6866 // check they are now in the right order
6867 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6868 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6869 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6870 });
6871
6872 // move the third worktree to before the second
6873 // [a, c, b] -> [a, b, c]
6874 project
6875 .update(cx, |project, cx| {
6876 let third = worktree_c.read(cx);
6877 let second = worktree_b.read(cx);
6878 project.move_worktree(third.id(), second.id(), cx)
6879 })
6880 .expect("moving third before second");
6881
6882 // check the state after moving
6883 project.update(cx, |project, cx| {
6884 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6885 assert_eq!(worktrees.len(), 3);
6886
6887 let first = worktrees[0].read(cx);
6888 let second = worktrees[1].read(cx);
6889 let third = worktrees[2].read(cx);
6890
6891 // check they are now in the right order
6892 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6893 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6894 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6895 });
6896
6897 // move the first worktree to after the third
6898 // [a, b, c] -> [b, c, a]
6899 project
6900 .update(cx, |project, cx| {
6901 let first = worktree_a.read(cx);
6902 let third = worktree_c.read(cx);
6903 project.move_worktree(first.id(), third.id(), cx)
6904 })
6905 .expect("moving first after third");
6906
6907 // check the state after moving
6908 project.update(cx, |project, cx| {
6909 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6910 assert_eq!(worktrees.len(), 3);
6911
6912 let first = worktrees[0].read(cx);
6913 let second = worktrees[1].read(cx);
6914 let third = worktrees[2].read(cx);
6915
6916 // check they are now in the right order
6917 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6918 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6919 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6920 });
6921
6922 // move the third worktree to before the first
6923 // [b, c, a] -> [a, b, c]
6924 project
6925 .update(cx, |project, cx| {
6926 let third = worktree_a.read(cx);
6927 let first = worktree_b.read(cx);
6928 project.move_worktree(third.id(), first.id(), cx)
6929 })
6930 .expect("moving third before first");
6931
6932 // check the state after moving
6933 project.update(cx, |project, cx| {
6934 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6935 assert_eq!(worktrees.len(), 3);
6936
6937 let first = worktrees[0].read(cx);
6938 let second = worktrees[1].read(cx);
6939 let third = worktrees[2].read(cx);
6940
6941 // check they are now in the right order
6942 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6943 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6944 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6945 });
6946}
6947
6948#[gpui::test]
6949async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6950 init_test(cx);
6951
6952 let staged_contents = r#"
6953 fn main() {
6954 println!("hello world");
6955 }
6956 "#
6957 .unindent();
6958 let file_contents = r#"
6959 // print goodbye
6960 fn main() {
6961 println!("goodbye world");
6962 }
6963 "#
6964 .unindent();
6965
6966 let fs = FakeFs::new(cx.background_executor.clone());
6967 fs.insert_tree(
6968 "/dir",
6969 json!({
6970 ".git": {},
6971 "src": {
6972 "main.rs": file_contents,
6973 }
6974 }),
6975 )
6976 .await;
6977
6978 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
6979
6980 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
6981
6982 let buffer = project
6983 .update(cx, |project, cx| {
6984 project.open_local_buffer("/dir/src/main.rs", cx)
6985 })
6986 .await
6987 .unwrap();
6988 let unstaged_diff = project
6989 .update(cx, |project, cx| {
6990 project.open_unstaged_diff(buffer.clone(), cx)
6991 })
6992 .await
6993 .unwrap();
6994
6995 cx.run_until_parked();
6996 unstaged_diff.update(cx, |unstaged_diff, cx| {
6997 let snapshot = buffer.read(cx).snapshot();
6998 assert_hunks(
6999 unstaged_diff.hunks(&snapshot, cx),
7000 &snapshot,
7001 &unstaged_diff.base_text_string().unwrap(),
7002 &[
7003 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7004 (
7005 2..3,
7006 " println!(\"hello world\");\n",
7007 " println!(\"goodbye world\");\n",
7008 DiffHunkStatus::modified_none(),
7009 ),
7010 ],
7011 );
7012 });
7013
7014 let staged_contents = r#"
7015 // print goodbye
7016 fn main() {
7017 }
7018 "#
7019 .unindent();
7020
7021 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7022
7023 cx.run_until_parked();
7024 unstaged_diff.update(cx, |unstaged_diff, cx| {
7025 let snapshot = buffer.read(cx).snapshot();
7026 assert_hunks(
7027 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7028 &snapshot,
7029 &unstaged_diff.base_text().text(),
7030 &[(
7031 2..3,
7032 "",
7033 " println!(\"goodbye world\");\n",
7034 DiffHunkStatus::added_none(),
7035 )],
7036 );
7037 });
7038}
7039
// Verifies the uncommitted diff (working copy vs. HEAD) for two buffers: a
// modified file and a deleted file. Checks the secondary (staged/unstaged)
// hunk status as HEAD and the index are changed out from under the open
// buffers.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Three versions of the same file: HEAD, index, and working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (HEAD content) picks up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The comment addition is staged too (HasSecondaryHunk); the println
    // change is already in the index, so it has no secondary hunk.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is not yet staged: the file still exists in the index.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the file is gone from the index, the deletion hunk has no
    // secondary (unstaged) counterpart.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7219
7220#[gpui::test]
7221async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
7222 use DiffHunkSecondaryStatus::*;
7223 init_test(cx);
7224
7225 let committed_contents = r#"
7226 zero
7227 one
7228 two
7229 three
7230 four
7231 five
7232 "#
7233 .unindent();
7234 let file_contents = r#"
7235 one
7236 TWO
7237 three
7238 FOUR
7239 five
7240 "#
7241 .unindent();
7242
7243 let fs = FakeFs::new(cx.background_executor.clone());
7244 fs.insert_tree(
7245 "/dir",
7246 json!({
7247 ".git": {},
7248 "file.txt": file_contents.clone()
7249 }),
7250 )
7251 .await;
7252
7253 fs.set_head_and_index_for_repo(
7254 path!("/dir/.git").as_ref(),
7255 &[("file.txt", committed_contents.clone())],
7256 );
7257
7258 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7259
7260 let buffer = project
7261 .update(cx, |project, cx| {
7262 project.open_local_buffer("/dir/file.txt", cx)
7263 })
7264 .await
7265 .unwrap();
7266 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7267 let uncommitted_diff = project
7268 .update(cx, |project, cx| {
7269 project.open_uncommitted_diff(buffer.clone(), cx)
7270 })
7271 .await
7272 .unwrap();
7273 let mut diff_events = cx.events(&uncommitted_diff);
7274
7275 // The hunks are initially unstaged.
7276 uncommitted_diff.read_with(cx, |diff, cx| {
7277 assert_hunks(
7278 diff.hunks(&snapshot, cx),
7279 &snapshot,
7280 &diff.base_text_string().unwrap(),
7281 &[
7282 (
7283 0..0,
7284 "zero\n",
7285 "",
7286 DiffHunkStatus::deleted(HasSecondaryHunk),
7287 ),
7288 (
7289 1..2,
7290 "two\n",
7291 "TWO\n",
7292 DiffHunkStatus::modified(HasSecondaryHunk),
7293 ),
7294 (
7295 3..4,
7296 "four\n",
7297 "FOUR\n",
7298 DiffHunkStatus::modified(HasSecondaryHunk),
7299 ),
7300 ],
7301 );
7302 });
7303
7304 // Stage a hunk. It appears as optimistically staged.
7305 uncommitted_diff.update(cx, |diff, cx| {
7306 let range =
7307 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
7308 let hunks = diff
7309 .hunks_intersecting_range(range, &snapshot, cx)
7310 .collect::<Vec<_>>();
7311 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7312
7313 assert_hunks(
7314 diff.hunks(&snapshot, cx),
7315 &snapshot,
7316 &diff.base_text_string().unwrap(),
7317 &[
7318 (
7319 0..0,
7320 "zero\n",
7321 "",
7322 DiffHunkStatus::deleted(HasSecondaryHunk),
7323 ),
7324 (
7325 1..2,
7326 "two\n",
7327 "TWO\n",
7328 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7329 ),
7330 (
7331 3..4,
7332 "four\n",
7333 "FOUR\n",
7334 DiffHunkStatus::modified(HasSecondaryHunk),
7335 ),
7336 ],
7337 );
7338 });
7339
7340 // The diff emits a change event for the range of the staged hunk.
7341 assert!(matches!(
7342 diff_events.next().await.unwrap(),
7343 BufferDiffEvent::HunksStagedOrUnstaged(_)
7344 ));
7345 let event = diff_events.next().await.unwrap();
7346 if let BufferDiffEvent::DiffChanged {
7347 changed_range: Some(changed_range),
7348 } = event
7349 {
7350 let changed_range = changed_range.to_point(&snapshot);
7351 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
7352 } else {
7353 panic!("Unexpected event {event:?}");
7354 }
7355
7356 // When the write to the index completes, it appears as staged.
7357 cx.run_until_parked();
7358 uncommitted_diff.update(cx, |diff, cx| {
7359 assert_hunks(
7360 diff.hunks(&snapshot, cx),
7361 &snapshot,
7362 &diff.base_text_string().unwrap(),
7363 &[
7364 (
7365 0..0,
7366 "zero\n",
7367 "",
7368 DiffHunkStatus::deleted(HasSecondaryHunk),
7369 ),
7370 (
7371 1..2,
7372 "two\n",
7373 "TWO\n",
7374 DiffHunkStatus::modified(NoSecondaryHunk),
7375 ),
7376 (
7377 3..4,
7378 "four\n",
7379 "FOUR\n",
7380 DiffHunkStatus::modified(HasSecondaryHunk),
7381 ),
7382 ],
7383 );
7384 });
7385
7386 // The diff emits a change event for the changed index text.
7387 let event = diff_events.next().await.unwrap();
7388 if let BufferDiffEvent::DiffChanged {
7389 changed_range: Some(changed_range),
7390 } = event
7391 {
7392 let changed_range = changed_range.to_point(&snapshot);
7393 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
7394 } else {
7395 panic!("Unexpected event {event:?}");
7396 }
7397
7398 // Simulate a problem writing to the git index.
7399 fs.set_error_message_for_index_write(
7400 "/dir/.git".as_ref(),
7401 Some("failed to write git index".into()),
7402 );
7403
7404 // Stage another hunk.
7405 uncommitted_diff.update(cx, |diff, cx| {
7406 let range =
7407 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
7408 let hunks = diff
7409 .hunks_intersecting_range(range, &snapshot, cx)
7410 .collect::<Vec<_>>();
7411 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7412
7413 assert_hunks(
7414 diff.hunks(&snapshot, cx),
7415 &snapshot,
7416 &diff.base_text_string().unwrap(),
7417 &[
7418 (
7419 0..0,
7420 "zero\n",
7421 "",
7422 DiffHunkStatus::deleted(HasSecondaryHunk),
7423 ),
7424 (
7425 1..2,
7426 "two\n",
7427 "TWO\n",
7428 DiffHunkStatus::modified(NoSecondaryHunk),
7429 ),
7430 (
7431 3..4,
7432 "four\n",
7433 "FOUR\n",
7434 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7435 ),
7436 ],
7437 );
7438 });
7439 assert!(matches!(
7440 diff_events.next().await.unwrap(),
7441 BufferDiffEvent::HunksStagedOrUnstaged(_)
7442 ));
7443 let event = diff_events.next().await.unwrap();
7444 if let BufferDiffEvent::DiffChanged {
7445 changed_range: Some(changed_range),
7446 } = event
7447 {
7448 let changed_range = changed_range.to_point(&snapshot);
7449 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
7450 } else {
7451 panic!("Unexpected event {event:?}");
7452 }
7453
7454 // When the write fails, the hunk returns to being unstaged.
7455 cx.run_until_parked();
7456 uncommitted_diff.update(cx, |diff, cx| {
7457 assert_hunks(
7458 diff.hunks(&snapshot, cx),
7459 &snapshot,
7460 &diff.base_text_string().unwrap(),
7461 &[
7462 (
7463 0..0,
7464 "zero\n",
7465 "",
7466 DiffHunkStatus::deleted(HasSecondaryHunk),
7467 ),
7468 (
7469 1..2,
7470 "two\n",
7471 "TWO\n",
7472 DiffHunkStatus::modified(NoSecondaryHunk),
7473 ),
7474 (
7475 3..4,
7476 "four\n",
7477 "FOUR\n",
7478 DiffHunkStatus::modified(HasSecondaryHunk),
7479 ),
7480 ],
7481 );
7482 });
7483
7484 let event = diff_events.next().await.unwrap();
7485 if let BufferDiffEvent::DiffChanged {
7486 changed_range: Some(changed_range),
7487 } = event
7488 {
7489 let changed_range = changed_range.to_point(&snapshot);
7490 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
7491 } else {
7492 panic!("Unexpected event {event:?}");
7493 }
7494
7495 // Allow writing to the git index to succeed again.
7496 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
7497
7498 // Stage two hunks with separate operations.
7499 uncommitted_diff.update(cx, |diff, cx| {
7500 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7501 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7502 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7503 });
7504
7505 // Both staged hunks appear as pending.
7506 uncommitted_diff.update(cx, |diff, cx| {
7507 assert_hunks(
7508 diff.hunks(&snapshot, cx),
7509 &snapshot,
7510 &diff.base_text_string().unwrap(),
7511 &[
7512 (
7513 0..0,
7514 "zero\n",
7515 "",
7516 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7517 ),
7518 (
7519 1..2,
7520 "two\n",
7521 "TWO\n",
7522 DiffHunkStatus::modified(NoSecondaryHunk),
7523 ),
7524 (
7525 3..4,
7526 "four\n",
7527 "FOUR\n",
7528 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7529 ),
7530 ],
7531 );
7532 });
7533
7534 // Both staging operations take effect.
7535 cx.run_until_parked();
7536 uncommitted_diff.update(cx, |diff, cx| {
7537 assert_hunks(
7538 diff.hunks(&snapshot, cx),
7539 &snapshot,
7540 &diff.base_text_string().unwrap(),
7541 &[
7542 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7543 (
7544 1..2,
7545 "two\n",
7546 "TWO\n",
7547 DiffHunkStatus::modified(NoSecondaryHunk),
7548 ),
7549 (
7550 3..4,
7551 "four\n",
7552 "FOUR\n",
7553 DiffHunkStatus::modified(NoSecondaryHunk),
7554 ),
7555 ],
7556 );
7557 });
7558}
7559
// Regression test (pinned seeds) for staging hunks while FS events from
// earlier index writes are still in flight: the optimistic pending states
// must survive delayed/batched FS notifications and converge to "staged"
// once all events are flushed.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start identical; the working copy deletes "zero" and
    // modifies "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. With events paused, it stays pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7753
// Randomized stress test: repeatedly stage/unstage random hunks with random
// yields in between, sometimes deprioritizing the diff-recalculation task to
// induce races between diff recalculation and index writes. The final
// secondary statuses must match the model built up locally.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via env for soak runs.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs.open_repo(path!("/dir/.git").as_ref()).unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the local model of each hunk's expected status.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle the hunk and record the optimistic pending state we expect.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Randomly yield so index writes and recalculations interleave.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, pending states must have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7874
7875#[gpui::test]
7876async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7877 init_test(cx);
7878
7879 let committed_contents = r#"
7880 fn main() {
7881 println!("hello from HEAD");
7882 }
7883 "#
7884 .unindent();
7885 let file_contents = r#"
7886 fn main() {
7887 println!("hello from the working copy");
7888 }
7889 "#
7890 .unindent();
7891
7892 let fs = FakeFs::new(cx.background_executor.clone());
7893 fs.insert_tree(
7894 "/dir",
7895 json!({
7896 ".git": {},
7897 "src": {
7898 "main.rs": file_contents,
7899 }
7900 }),
7901 )
7902 .await;
7903
7904 fs.set_head_for_repo(
7905 Path::new("/dir/.git"),
7906 &[("src/main.rs", committed_contents.clone())],
7907 "deadbeef",
7908 );
7909 fs.set_index_for_repo(
7910 Path::new("/dir/.git"),
7911 &[("src/main.rs", committed_contents.clone())],
7912 );
7913
7914 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7915
7916 let buffer = project
7917 .update(cx, |project, cx| {
7918 project.open_local_buffer("/dir/src/main.rs", cx)
7919 })
7920 .await
7921 .unwrap();
7922 let uncommitted_diff = project
7923 .update(cx, |project, cx| {
7924 project.open_uncommitted_diff(buffer.clone(), cx)
7925 })
7926 .await
7927 .unwrap();
7928
7929 cx.run_until_parked();
7930 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7931 let snapshot = buffer.read(cx).snapshot();
7932 assert_hunks(
7933 uncommitted_diff.hunks(&snapshot, cx),
7934 &snapshot,
7935 &uncommitted_diff.base_text_string().unwrap(),
7936 &[(
7937 1..2,
7938 " println!(\"hello from HEAD\");\n",
7939 " println!(\"hello from the working copy\");\n",
7940 DiffHunkStatus {
7941 kind: DiffHunkStatusKind::Modified,
7942 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7943 },
7944 )],
7945 );
7946 });
7947}
7948
// Verifies mapping from project paths to (repository, repo-relative path),
// including nested repositories (a dependency with its own .git inside the
// outer repo) and the removal of a repository at runtime.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // `dir1` is a repo; `dir1/deps/dep1` is a nested repo; `c.txt` is in no repo.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo workdir, repo-relative path)).
        // Files inside the nested repo must resolve to the *inner* repo.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                // `maybe!` turns the `?` on the lookup into an Option result.
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Delete the outer repository; paths under it should no longer resolve.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8038
8039#[gpui::test]
8040async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8041 init_test(cx);
8042 let fs = FakeFs::new(cx.background_executor.clone());
8043 let home = paths::home_dir();
8044 fs.insert_tree(
8045 home,
8046 json!({
8047 ".git": {},
8048 "project": {
8049 "a.txt": "A"
8050 },
8051 }),
8052 )
8053 .await;
8054
8055 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8056 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8057 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8058
8059 project
8060 .update(cx, |project, cx| project.git_scans_complete(cx))
8061 .await;
8062 tree.flush_fs_events(cx).await;
8063
8064 project.read_with(cx, |project, cx| {
8065 let containing = project
8066 .git_store()
8067 .read(cx)
8068 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8069 assert!(containing.is_none());
8070 });
8071
8072 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8073 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8074 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8075 project
8076 .update(cx, |project, cx| project.git_scans_complete(cx))
8077 .await;
8078 tree.flush_fs_events(cx).await;
8079
8080 project.read_with(cx, |project, cx| {
8081 let containing = project
8082 .git_store()
8083 .read(cx)
8084 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8085 assert_eq!(
8086 containing
8087 .unwrap()
8088 .0
8089 .read(cx)
8090 .work_directory_abs_path
8091 .as_ref(),
8092 home,
8093 );
8094 });
8095}
8096
// End-to-end check of git status tracking against a real repository on disk:
// statuses must be correct at startup and stay current through working-copy
// edits, commits, index changes, and deletions.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged and therefore has no entry.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a file that was unchanged at startup and check that the change
    // is picked up.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modified files and drop d.txt from the index entirely.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt) from the
    // working copy.
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8226
8227#[gpui::test]
8228async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
8229 init_test(cx);
8230 cx.executor().allow_parking();
8231
8232 let root = TempTree::new(json!({
8233 "project": {
8234 "sub": {},
8235 "a.txt": "",
8236 },
8237 }));
8238
8239 let work_dir = root.path().join("project");
8240 let repo = git_init(work_dir.as_path());
8241 // a.txt exists in HEAD and the working copy but is deleted in the index.
8242 git_add("a.txt", &repo);
8243 git_commit("Initial commit", &repo);
8244 git_remove_index("a.txt".as_ref(), &repo);
8245 // `sub` is a nested git repository.
8246 let _sub = git_init(&work_dir.join("sub"));
8247
8248 let project = Project::test(
8249 Arc::new(RealFs::new(None, cx.executor())),
8250 [root.path()],
8251 cx,
8252 )
8253 .await;
8254
8255 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8256 tree.flush_fs_events(cx).await;
8257 project
8258 .update(cx, |project, cx| project.git_scans_complete(cx))
8259 .await;
8260 cx.executor().run_until_parked();
8261
8262 let repository = project.read_with(cx, |project, cx| {
8263 project
8264 .repositories(cx)
8265 .values()
8266 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
8267 .unwrap()
8268 .clone()
8269 });
8270
8271 repository.read_with(cx, |repository, _cx| {
8272 let entries = repository.cached_status().collect::<Vec<_>>();
8273
8274 // `sub` doesn't appear in our computed statuses.
8275 // a.txt appears with a combined `DA` status.
8276 assert_eq!(
8277 entries,
8278 [StatusEntry {
8279 repo_path: repo_path("a.txt"),
8280 status: TrackedStatus {
8281 index_status: StatusCode::Deleted,
8282 worktree_status: StatusCode::Added
8283 }
8284 .into(),
8285 }]
8286 )
8287 });
8288}
8289
8290#[gpui::test]
8291async fn test_repository_subfolder_git_status(
8292 executor: gpui::BackgroundExecutor,
8293 cx: &mut gpui::TestAppContext,
8294) {
8295 init_test(cx);
8296
8297 let fs = FakeFs::new(executor);
8298 fs.insert_tree(
8299 path!("/root"),
8300 json!({
8301 "my-repo": {
8302 ".git": {},
8303 "a.txt": "a",
8304 "sub-folder-1": {
8305 "sub-folder-2": {
8306 "c.txt": "cc",
8307 "d": {
8308 "e.txt": "eee"
8309 }
8310 },
8311 }
8312 },
8313 }),
8314 )
8315 .await;
8316
8317 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8318 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8319
8320 fs.set_status_for_repo(
8321 path!("/root/my-repo/.git").as_ref(),
8322 &[(E_TXT, FileStatus::Untracked)],
8323 );
8324
8325 let project = Project::test(
8326 fs.clone(),
8327 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8328 cx,
8329 )
8330 .await;
8331
8332 project
8333 .update(cx, |project, cx| project.git_scans_complete(cx))
8334 .await;
8335 cx.run_until_parked();
8336
8337 let repository = project.read_with(cx, |project, cx| {
8338 project.repositories(cx).values().next().unwrap().clone()
8339 });
8340
8341 // Ensure that the git status is loaded correctly
8342 repository.read_with(cx, |repository, _cx| {
8343 assert_eq!(
8344 repository.work_directory_abs_path,
8345 Path::new(path!("/root/my-repo")).into()
8346 );
8347
8348 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8349 assert_eq!(
8350 repository
8351 .status_for_path(&repo_path(E_TXT))
8352 .unwrap()
8353 .status,
8354 FileStatus::Untracked
8355 );
8356 });
8357
8358 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8359 project
8360 .update(cx, |project, cx| project.git_scans_complete(cx))
8361 .await;
8362 cx.run_until_parked();
8363
8364 repository.read_with(cx, |repository, _cx| {
8365 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8366 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
8367 });
8368}
8369
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit a conflicting edit to a.txt on a second branch.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Diverge main, then cherry-pick the other branch's commit to force a
    // conflict on a.txt.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository should now report the conflicted path.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once the cherry-pick concludes, no conflicts remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8452
8453#[gpui::test]
8454async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
8455 init_test(cx);
8456 let fs = FakeFs::new(cx.background_executor.clone());
8457 fs.insert_tree(
8458 path!("/root"),
8459 json!({
8460 ".git": {},
8461 ".gitignore": "*.txt\n",
8462 "a.xml": "<a></a>",
8463 "b.txt": "Some text"
8464 }),
8465 )
8466 .await;
8467
8468 fs.set_head_and_index_for_repo(
8469 path!("/root/.git").as_ref(),
8470 &[
8471 (".gitignore", "*.txt\n".into()),
8472 ("a.xml", "<a></a>".into()),
8473 ],
8474 );
8475
8476 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8477
8478 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8479 tree.flush_fs_events(cx).await;
8480 project
8481 .update(cx, |project, cx| project.git_scans_complete(cx))
8482 .await;
8483 cx.executor().run_until_parked();
8484
8485 let repository = project.read_with(cx, |project, cx| {
8486 project.repositories(cx).values().next().unwrap().clone()
8487 });
8488
8489 // One file is unmodified, the other is ignored.
8490 cx.read(|cx| {
8491 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
8492 assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
8493 });
8494
8495 // Change the gitignore, and stage the newly non-ignored file.
8496 fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
8497 .await
8498 .unwrap();
8499 fs.set_index_for_repo(
8500 Path::new(path!("/root/.git")),
8501 &[
8502 (".gitignore", "*.txt\n".into()),
8503 ("a.xml", "<a></a>".into()),
8504 ("b.txt", "Some text".into()),
8505 ],
8506 );
8507
8508 cx.executor().run_until_parked();
8509 cx.read(|cx| {
8510 assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
8511 assert_entry_git_state(
8512 tree.read(cx),
8513 repository.read(cx),
8514 "b.txt",
8515 Some(StatusCode::Added),
8516 false,
8517 );
8518 });
8519}
8520
8521// NOTE:
8522// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8523// a directory which some program has already open.
8524// This is a limitation of the Windows.
8525// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified on disk; `b` is never added (untracked).
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory on disk. The repository entry
    // should follow the new location and keep reporting the same statuses.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8601
8602// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8603// you can't rename a directory which some program has already open. This is a
8604// limitation of the Windows. See:
8605// https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// Exercises git status tracking across a long sequence of operations against
// a real on-disk repository: untracked files, working-copy edits, commits,
// reset/stash/index manipulation, ignore rules, deletions, and directory
// renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they are untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        // Committed files no longer have status entries; f.txt stays untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's change was stashed, b.txt became untracked after being
        // removed from the index, and e.txt was modified on disk.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Delete files from the working copy and start ignoring f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // A new file inside a newly created nested directory shows up untracked.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory keeps the file's untracked status under
    // its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8829
8830#[gpui::test]
8831async fn test_repos_in_invisible_worktrees(
8832 executor: BackgroundExecutor,
8833 cx: &mut gpui::TestAppContext,
8834) {
8835 init_test(cx);
8836 let fs = FakeFs::new(executor);
8837 fs.insert_tree(
8838 path!("/root"),
8839 json!({
8840 "dir1": {
8841 ".git": {},
8842 "dep1": {
8843 ".git": {},
8844 "src": {
8845 "a.txt": "",
8846 },
8847 },
8848 "b.txt": "",
8849 },
8850 }),
8851 )
8852 .await;
8853
8854 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
8855 let _visible_worktree =
8856 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8857 project
8858 .update(cx, |project, cx| project.git_scans_complete(cx))
8859 .await;
8860
8861 let repos = project.read_with(cx, |project, cx| {
8862 project
8863 .repositories(cx)
8864 .values()
8865 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8866 .collect::<Vec<_>>()
8867 });
8868 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
8869
8870 let (_invisible_worktree, _) = project
8871 .update(cx, |project, cx| {
8872 project.worktree_store.update(cx, |worktree_store, cx| {
8873 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
8874 })
8875 })
8876 .await
8877 .expect("failed to create worktree");
8878 project
8879 .update(cx, |project, cx| project.git_scans_complete(cx))
8880 .await;
8881
8882 let repos = project.read_with(cx, |project, cx| {
8883 project
8884 .repositories(cx)
8885 .values()
8886 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
8887 .collect::<Vec<_>>()
8888 });
8889 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
8890}
8891
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so ignored directories are included in the
    // worktree scan and can be refreshed below.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Load the ignored directory's entries into the worktree.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initially: the tracked file is clean, the file ignored only by the
    // ancestor .gitignore (outside the repo) is not ignored, and the file in
    // ignored-dir is ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create three new files: one that gets staged, one covered by the
    // ancestor .gitignore, and one inside the ignored directory.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9032
// Checks that linked git worktrees (a `.git` file pointing at
// `.git/worktrees/<name>`) and submodules (a `.git` file pointing at
// `.git/modules/<path>`) are each detected as their own repository, and that
// git events inside them refresh their statuses.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // The main repo, the linked worktree, and the submodule are all detected.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    // Open a buffer inside the linked worktree and confirm it maps to the
    // linked worktree's repository, not the main one.
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        // The barrier resolves once the repository has processed its pending
        // events, so the status checks below observe fresh state.
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9188
9189#[gpui::test]
9190async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
9191 init_test(cx);
9192 let fs = FakeFs::new(cx.background_executor.clone());
9193 fs.insert_tree(
9194 path!("/root"),
9195 json!({
9196 "project": {
9197 ".git": {},
9198 "child1": {
9199 "a.txt": "A",
9200 },
9201 "child2": {
9202 "b.txt": "B",
9203 }
9204 }
9205 }),
9206 )
9207 .await;
9208
9209 let project = Project::test(
9210 fs.clone(),
9211 [
9212 path!("/root/project/child1").as_ref(),
9213 path!("/root/project/child2").as_ref(),
9214 ],
9215 cx,
9216 )
9217 .await;
9218
9219 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9220 tree.flush_fs_events(cx).await;
9221 project
9222 .update(cx, |project, cx| project.git_scans_complete(cx))
9223 .await;
9224 cx.executor().run_until_parked();
9225
9226 let repos = project.read_with(cx, |project, cx| {
9227 project
9228 .repositories(cx)
9229 .values()
9230 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9231 .collect::<Vec<_>>()
9232 });
9233 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
9234}
9235
9236async fn search(
9237 project: &Entity<Project>,
9238 query: SearchQuery,
9239 cx: &mut gpui::TestAppContext,
9240) -> Result<HashMap<String, Vec<Range<usize>>>> {
9241 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9242 let mut results = HashMap::default();
9243 while let Ok(search_result) = search_rx.recv().await {
9244 match search_result {
9245 SearchResult::Buffer { buffer, ranges } => {
9246 results.entry(buffer).or_insert(ranges);
9247 }
9248 SearchResult::LimitReached => {}
9249 }
9250 }
9251 Ok(results
9252 .into_iter()
9253 .map(|(buffer, ranges)| {
9254 buffer.update(cx, |buffer, cx| {
9255 let path = buffer
9256 .file()
9257 .unwrap()
9258 .full_path(cx)
9259 .to_string_lossy()
9260 .to_string();
9261 let ranges = ranges
9262 .into_iter()
9263 .map(|range| range.to_offset(buffer))
9264 .collect::<Vec<_>>();
9265 (path, ranges)
9266 })
9267 })
9268 .collect())
9269}
9270
9271pub fn init_test(cx: &mut gpui::TestAppContext) {
9272 zlog::init_test();
9273
9274 cx.update(|cx| {
9275 let settings_store = SettingsStore::test(cx);
9276 cx.set_global(settings_store);
9277 release_channel::init(SemanticVersion::default(), cx);
9278 language::init(cx);
9279 Project::init_settings(cx);
9280 });
9281}
9282
9283fn json_lang() -> Arc<Language> {
9284 Arc::new(Language::new(
9285 LanguageConfig {
9286 name: "JSON".into(),
9287 matcher: LanguageMatcher {
9288 path_suffixes: vec!["json".to_string()],
9289 ..Default::default()
9290 },
9291 ..Default::default()
9292 },
9293 None,
9294 ))
9295}
9296
9297fn js_lang() -> Arc<Language> {
9298 Arc::new(Language::new(
9299 LanguageConfig {
9300 name: "JavaScript".into(),
9301 matcher: LanguageMatcher {
9302 path_suffixes: vec!["js".to_string()],
9303 ..Default::default()
9304 },
9305 ..Default::default()
9306 },
9307 None,
9308 ))
9309}
9310
9311fn rust_lang() -> Arc<Language> {
9312 Arc::new(Language::new(
9313 LanguageConfig {
9314 name: "Rust".into(),
9315 matcher: LanguageMatcher {
9316 path_suffixes: vec!["rs".to_string()],
9317 ..Default::default()
9318 },
9319 ..Default::default()
9320 },
9321 Some(tree_sitter_rust::LANGUAGE.into()),
9322 ))
9323}
9324
/// A minimal Python language (no grammar) whose toolchain lister reports a
/// "Python Venv" toolchain for every `.venv` directory found in the fake
/// filesystem along the queried path's ancestors. Used to exercise toolchain
/// discovery without a real Python installation.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // Report a toolchain for every `.venv` directory that exists (in
            // the fake fs) under any ancestor of `subroot_relative_path`.
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is deliberately unsupported by this fake lister.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No shell activation commands are needed in tests.
        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9392
9393fn typescript_lang() -> Arc<Language> {
9394 Arc::new(Language::new(
9395 LanguageConfig {
9396 name: "TypeScript".into(),
9397 matcher: LanguageMatcher {
9398 path_suffixes: vec!["ts".to_string()],
9399 ..Default::default()
9400 },
9401 ..Default::default()
9402 },
9403 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9404 ))
9405}
9406
9407fn tsx_lang() -> Arc<Language> {
9408 Arc::new(Language::new(
9409 LanguageConfig {
9410 name: "tsx".into(),
9411 matcher: LanguageMatcher {
9412 path_suffixes: vec!["tsx".to_string()],
9413 ..Default::default()
9414 },
9415 ..Default::default()
9416 },
9417 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9418 ))
9419}
9420
9421fn get_all_tasks(
9422 project: &Entity<Project>,
9423 task_contexts: Arc<TaskContexts>,
9424 cx: &mut App,
9425) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9426 let new_tasks = project.update(cx, |project, cx| {
9427 project.task_store.update(cx, |task_store, cx| {
9428 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9429 this.used_and_current_resolved_tasks(task_contexts, cx)
9430 })
9431 })
9432 });
9433
9434 cx.background_spawn(async move {
9435 let (mut old, new) = new_tasks.await;
9436 old.extend(new);
9437 old
9438 })
9439}
9440
9441#[track_caller]
9442fn assert_entry_git_state(
9443 tree: &Worktree,
9444 repository: &Repository,
9445 path: &str,
9446 index_status: Option<StatusCode>,
9447 is_ignored: bool,
9448) {
9449 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9450 let entry = tree
9451 .entry_for_path(&rel_path(path))
9452 .unwrap_or_else(|| panic!("entry {path} not found"));
9453 let status = repository
9454 .status_for_path(&repo_path(path))
9455 .map(|entry| entry.status);
9456 let expected = index_status.map(|index_status| {
9457 TrackedStatus {
9458 index_status,
9459 worktree_status: StatusCode::Unmodified,
9460 }
9461 .into()
9462 });
9463 assert_eq!(
9464 status, expected,
9465 "expected {path} to have git status: {expected:?}"
9466 );
9467 assert_eq!(
9468 entry.is_ignored, is_ignored,
9469 "expected {path} to have is_ignored: {is_ignored}"
9470 );
9471}
9472
9473#[track_caller]
9474fn git_init(path: &Path) -> git2::Repository {
9475 let mut init_opts = RepositoryInitOptions::new();
9476 init_opts.initial_head("main");
9477 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9478}
9479
9480#[track_caller]
9481fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9482 let path = path.as_ref();
9483 let mut index = repo.index().expect("Failed to get index");
9484 index.add_path(path).expect("Failed to add file");
9485 index.write().expect("Failed to write index");
9486}
9487
9488#[track_caller]
9489fn git_remove_index(path: &Path, repo: &git2::Repository) {
9490 let mut index = repo.index().expect("Failed to get index");
9491 index.remove_path(path).expect("Failed to add file");
9492 index.write().expect("Failed to write index");
9493}
9494
9495#[track_caller]
9496fn git_commit(msg: &'static str, repo: &git2::Repository) {
9497 use git2::Signature;
9498
9499 let signature = Signature::now("test", "test@zed.dev").unwrap();
9500 let oid = repo.index().unwrap().write_tree().unwrap();
9501 let tree = repo.find_tree(oid).unwrap();
9502 if let Ok(head) = repo.head() {
9503 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9504
9505 let parent_commit = parent_obj.as_commit().unwrap();
9506
9507 repo.commit(
9508 Some("HEAD"),
9509 &signature,
9510 &signature,
9511 msg,
9512 &tree,
9513 &[parent_commit],
9514 )
9515 .expect("Failed to commit with parent");
9516 } else {
9517 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9518 .expect("Failed to commit");
9519 }
9520}
9521
/// Applies the changes introduced by `commit` onto the current working tree
/// and index. (Currently compiled out via `#[cfg(any())]`.)
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None)
        .expect("Failed to cherrypick");
}
9527
9528#[track_caller]
9529fn git_stash(repo: &mut git2::Repository) {
9530 use git2::Signature;
9531
9532 let signature = Signature::now("test", "test@zed.dev").unwrap();
9533 repo.stash_save(&signature, "N/A", None)
9534 .expect("Failed to stash");
9535}
9536
9537#[track_caller]
9538fn git_reset(offset: usize, repo: &git2::Repository) {
9539 let head = repo.head().expect("Couldn't get repo head");
9540 let object = head.peel(git2::ObjectType::Commit).unwrap();
9541 let commit = object.as_commit().unwrap();
9542 let new_head = commit
9543 .parents()
9544 .inspect(|parnet| {
9545 parnet.message();
9546 })
9547 .nth(offset)
9548 .expect("Not enough history");
9549 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9550 .expect("Could not reset");
9551}
9552
/// Creates a branch named `name` pointing at the current HEAD commit without
/// checking it out. (Currently compiled out via `#[cfg(any())]`.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // The expect message previously said "Failed to commit"; corrected to
    // describe the actual branch-creation operation.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9563
/// Points HEAD at the ref named `name` and checks out its contents into the
/// working tree. (Currently compiled out via `#[cfg(any())]`.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None)
        .expect("Failed to check out head");
}
9570
/// Returns a map from each path reported by `repo` to its current
/// `git2::Status` flags. (Currently compiled out via `#[cfg(any())]`.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    statuses
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
9580
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // `find_project_path` should work with absolute paths: any absolute path
    // inside a worktree resolves to a (worktree id, relative path) pair, and
    // paths outside every worktree resolve to `None`.
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Open a project with two separate worktrees (project1 and project2).
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // NOTE(review): assumes `project.worktrees(cx)` yields the worktrees in
    // the order they were passed to `Project::test` above — confirm if this
    // test becomes flaky.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A file nested one directory deep in the first worktree.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves to that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A path inside a worktree resolves even when no file exists there yet.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}