1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
97// NOTE:
98// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
99// we assume that they are not supported out of the box.
100#[cfg(not(windows))]
101#[gpui::test]
102async fn test_symlinks(cx: &mut gpui::TestAppContext) {
103 init_test(cx);
104 cx.executor().allow_parking();
105
106 let dir = TempTree::new(json!({
107 "root": {
108 "apple": "",
109 "banana": {
110 "carrot": {
111 "date": "",
112 "endive": "",
113 }
114 },
115 "fennel": {
116 "grape": "",
117 }
118 }
119 }));
120
121 let root_link_path = dir.path().join("root_link");
122 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
123 os::unix::fs::symlink(
124 dir.path().join("root/fennel"),
125 dir.path().join("root/finnochio"),
126 )
127 .unwrap();
128
129 let project = Project::test(
130 Arc::new(RealFs::new(None, cx.executor())),
131 [root_link_path.as_ref()],
132 cx,
133 )
134 .await;
135
136 project.update(cx, |project, cx| {
137 let tree = project.worktrees(cx).next().unwrap().read(cx);
138 assert_eq!(tree.file_count(), 5);
139 assert_eq!(
140 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
141 tree.entry_for_path(rel_path("finnochio/grape"))
142 .unwrap()
143 .inode
144 );
145 });
146}
147
// Verifies that `.editorconfig` files are honored, that they override
// `.zed/settings.json`, that nested `.editorconfig` files override parent
// ones, and that unsupported/`off` values fall back to Zed settings.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project can watch it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings/editorconfig scanning finish before asserting.
    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a file in the worktree.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
246
// Verifies that custom git hosting providers declared in a project's
// `.zed/settings.json` are registered globally, and unregistered again when
// the setting is removed.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare one custom provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    // Allow the settings file to be scanned and applied.
    cx.executor().run_until_parked();

    // The provider from the project settings should now be registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clear the project settings on disk; the watcher should pick this up.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // With the setting gone, "foo" must no longer be registered.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
311
// Verifies per-directory `.zed/settings.json` and `.zed/tasks.json` handling:
// nested settings override parent ones, worktree tasks from nested `.zed`
// directories are discovered, previously-run tasks are promoted in ordering,
// and global tasks from the user tasks file are appended last.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let settings and tasks files be scanned before querying them.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Query tasks with the worktree as the active context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the worktree-root `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolution: a/a.rs sees the root settings (tab_size 8),
            // b/b.rs sees the nested override (tab_size 2).
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree tasks are found; neither has been run yet.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled, and add a global task via the user
    // tasks file.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The previously-scheduled task now sorts first; the global task from the
    // user tasks file appears last, with its env applied.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
512
// Verifies that a task referencing $ZED_WORKTREE_ROOT only resolves when the
// task context actually carries a worktree root: with no active worktree
// context it yields nothing, and with one it resolves against that root.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    // Let the tasks file be scanned before querying.
    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Active item only, no active worktree context: the task's
    // $ZED_WORKTREE_ROOT variable cannot be substituted.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Same query, but with a worktree context providing WorktreeRoot = /dir.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    // The variable is substituted into the resolved command.
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
604
// Verifies language-server rooting within a single worktree: two subprojects
// (each with a pyproject.toml) initially share one "ty" server instance, and
// activating a distinct toolchain for one subproject spawns a second,
// separate server instance for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a directory is a project root if it (or an
    // ancestor within `depth`) contains a pyproject.toml.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up from `path`, at most `depth` levels, looking for the
            // manifest file.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects under one worktree, each with its own manifest
    // and .venv directory. Project settings force the "ty" language server.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery roots at project-b (found via its pyproject.toml).
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // Nothing has been activated yet.
    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
806
807#[gpui::test]
808async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
809 init_test(cx);
810
811 let fs = FakeFs::new(cx.executor());
812 fs.insert_tree(
813 path!("/dir"),
814 json!({
815 "test.rs": "const A: i32 = 1;",
816 "test2.rs": "",
817 "Cargo.toml": "a = 1",
818 "package.json": "{\"a\": 1}",
819 }),
820 )
821 .await;
822
823 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
824 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
825
826 let mut fake_rust_servers = language_registry.register_fake_lsp(
827 "Rust",
828 FakeLspAdapter {
829 name: "the-rust-language-server",
830 capabilities: lsp::ServerCapabilities {
831 completion_provider: Some(lsp::CompletionOptions {
832 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
833 ..Default::default()
834 }),
835 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
836 lsp::TextDocumentSyncOptions {
837 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
838 ..Default::default()
839 },
840 )),
841 ..Default::default()
842 },
843 ..Default::default()
844 },
845 );
846 let mut fake_json_servers = language_registry.register_fake_lsp(
847 "JSON",
848 FakeLspAdapter {
849 name: "the-json-language-server",
850 capabilities: lsp::ServerCapabilities {
851 completion_provider: Some(lsp::CompletionOptions {
852 trigger_characters: Some(vec![":".to_string()]),
853 ..Default::default()
854 }),
855 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
856 lsp::TextDocumentSyncOptions {
857 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
858 ..Default::default()
859 },
860 )),
861 ..Default::default()
862 },
863 ..Default::default()
864 },
865 );
866
867 // Open a buffer without an associated language server.
868 let (toml_buffer, _handle) = project
869 .update(cx, |project, cx| {
870 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
871 })
872 .await
873 .unwrap();
874
875 // Open a buffer with an associated language server before the language for it has been loaded.
876 let (rust_buffer, _handle2) = project
877 .update(cx, |project, cx| {
878 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
879 })
880 .await
881 .unwrap();
882 rust_buffer.update(cx, |buffer, _| {
883 assert_eq!(buffer.language().map(|l| l.name()), None);
884 });
885
886 // Now we add the languages to the project, and ensure they get assigned to all
887 // the relevant open buffers.
888 language_registry.add(json_lang());
889 language_registry.add(rust_lang());
890 cx.executor().run_until_parked();
891 rust_buffer.update(cx, |buffer, _| {
892 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
893 });
894
895 // A server is started up, and it is notified about Rust files.
896 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
897 assert_eq!(
898 fake_rust_server
899 .receive_notification::<lsp::notification::DidOpenTextDocument>()
900 .await
901 .text_document,
902 lsp::TextDocumentItem {
903 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
904 version: 0,
905 text: "const A: i32 = 1;".to_string(),
906 language_id: "rust".to_string(),
907 }
908 );
909
910 // The buffer is configured based on the language server's capabilities.
911 rust_buffer.update(cx, |buffer, _| {
912 assert_eq!(
913 buffer
914 .completion_triggers()
915 .iter()
916 .cloned()
917 .collect::<Vec<_>>(),
918 &[".".to_string(), "::".to_string()]
919 );
920 });
921 toml_buffer.update(cx, |buffer, _| {
922 assert!(buffer.completion_triggers().is_empty());
923 });
924
925 // Edit a buffer. The changes are reported to the language server.
926 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
927 assert_eq!(
928 fake_rust_server
929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
930 .await
931 .text_document,
932 lsp::VersionedTextDocumentIdentifier::new(
933 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
934 1
935 )
936 );
937
938 // Open a third buffer with a different associated language server.
939 let (json_buffer, _json_handle) = project
940 .update(cx, |project, cx| {
941 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
942 })
943 .await
944 .unwrap();
945
946 // A json language server is started up and is only notified about the json buffer.
947 let mut fake_json_server = fake_json_servers.next().await.unwrap();
948 assert_eq!(
949 fake_json_server
950 .receive_notification::<lsp::notification::DidOpenTextDocument>()
951 .await
952 .text_document,
953 lsp::TextDocumentItem {
954 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
955 version: 0,
956 text: "{\"a\": 1}".to_string(),
957 language_id: "json".to_string(),
958 }
959 );
960
961 // This buffer is configured based on the second language server's
962 // capabilities.
963 json_buffer.update(cx, |buffer, _| {
964 assert_eq!(
965 buffer
966 .completion_triggers()
967 .iter()
968 .cloned()
969 .collect::<Vec<_>>(),
970 &[":".to_string()]
971 );
972 });
973
974 // When opening another buffer whose language server is already running,
975 // it is also configured based on the existing language server's capabilities.
976 let (rust_buffer2, _handle4) = project
977 .update(cx, |project, cx| {
978 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
979 })
980 .await
981 .unwrap();
982 rust_buffer2.update(cx, |buffer, _| {
983 assert_eq!(
984 buffer
985 .completion_triggers()
986 .iter()
987 .cloned()
988 .collect::<Vec<_>>(),
989 &[".".to_string(), "::".to_string()]
990 );
991 });
992
993 // Changes are reported only to servers matching the buffer's language.
994 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
995 rust_buffer2.update(cx, |buffer, cx| {
996 buffer.edit([(0..0, "let x = 1;")], None, cx)
997 });
998 assert_eq!(
999 fake_rust_server
1000 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1001 .await
1002 .text_document,
1003 lsp::VersionedTextDocumentIdentifier::new(
1004 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1005 1
1006 )
1007 );
1008
1009 // Save notifications are reported to all servers.
1010 project
1011 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1012 .await
1013 .unwrap();
1014 assert_eq!(
1015 fake_rust_server
1016 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1017 .await
1018 .text_document,
1019 lsp::TextDocumentIdentifier::new(
1020 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1021 )
1022 );
1023 assert_eq!(
1024 fake_json_server
1025 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1026 .await
1027 .text_document,
1028 lsp::TextDocumentIdentifier::new(
1029 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1030 )
1031 );
1032
1033 // Renames are reported only to servers matching the buffer's language.
1034 fs.rename(
1035 Path::new(path!("/dir/test2.rs")),
1036 Path::new(path!("/dir/test3.rs")),
1037 Default::default(),
1038 )
1039 .await
1040 .unwrap();
1041 assert_eq!(
1042 fake_rust_server
1043 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1044 .await
1045 .text_document,
1046 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1047 );
1048 assert_eq!(
1049 fake_rust_server
1050 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1051 .await
1052 .text_document,
1053 lsp::TextDocumentItem {
1054 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1055 version: 0,
1056 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1057 language_id: "rust".to_string(),
1058 },
1059 );
1060
1061 rust_buffer2.update(cx, |buffer, cx| {
1062 buffer.update_diagnostics(
1063 LanguageServerId(0),
1064 DiagnosticSet::from_sorted_entries(
1065 vec![DiagnosticEntry {
1066 diagnostic: Default::default(),
1067 range: Anchor::MIN..Anchor::MAX,
1068 }],
1069 &buffer.snapshot(),
1070 ),
1071 cx,
1072 );
1073 assert_eq!(
1074 buffer
1075 .snapshot()
1076 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1077 .count(),
1078 1
1079 );
1080 });
1081
1082 // When the rename changes the extension of the file, the buffer gets closed on the old
1083 // language server and gets opened on the new one.
1084 fs.rename(
1085 Path::new(path!("/dir/test3.rs")),
1086 Path::new(path!("/dir/test3.json")),
1087 Default::default(),
1088 )
1089 .await
1090 .unwrap();
1091 assert_eq!(
1092 fake_rust_server
1093 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1094 .await
1095 .text_document,
1096 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1097 );
1098 assert_eq!(
1099 fake_json_server
1100 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1101 .await
1102 .text_document,
1103 lsp::TextDocumentItem {
1104 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1105 version: 0,
1106 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1107 language_id: "json".to_string(),
1108 },
1109 );
1110
1111 // We clear the diagnostics, since the language has changed.
1112 rust_buffer2.update(cx, |buffer, _| {
1113 assert_eq!(
1114 buffer
1115 .snapshot()
1116 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1117 .count(),
1118 0
1119 );
1120 });
1121
1122 // The renamed file's version resets after changing language server.
1123 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1124 assert_eq!(
1125 fake_json_server
1126 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1127 .await
1128 .text_document,
1129 lsp::VersionedTextDocumentIdentifier::new(
1130 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1131 1
1132 )
1133 );
1134
1135 // Restart language servers
1136 project.update(cx, |project, cx| {
1137 project.restart_language_servers_for_buffers(
1138 vec![rust_buffer.clone(), json_buffer.clone()],
1139 HashSet::default(),
1140 cx,
1141 );
1142 });
1143
1144 let mut rust_shutdown_requests = fake_rust_server
1145 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1146 let mut json_shutdown_requests = fake_json_server
1147 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1148 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1149
1150 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1151 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1152
1153 // Ensure rust document is reopened in new rust language server
1154 assert_eq!(
1155 fake_rust_server
1156 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1157 .await
1158 .text_document,
1159 lsp::TextDocumentItem {
1160 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1161 version: 0,
1162 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1163 language_id: "rust".to_string(),
1164 }
1165 );
1166
1167 // Ensure json documents are reopened in new json language server
1168 assert_set_eq!(
1169 [
1170 fake_json_server
1171 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1172 .await
1173 .text_document,
1174 fake_json_server
1175 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1176 .await
1177 .text_document,
1178 ],
1179 [
1180 lsp::TextDocumentItem {
1181 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1182 version: 0,
1183 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1184 language_id: "json".to_string(),
1185 },
1186 lsp::TextDocumentItem {
1187 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1188 version: 0,
1189 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1190 language_id: "json".to_string(),
1191 }
1192 ]
1193 );
1194
1195 // Close notifications are reported only to servers matching the buffer's language.
1196 cx.update(|_| drop(_json_handle));
1197 let close_message = lsp::DidCloseTextDocumentParams {
1198 text_document: lsp::TextDocumentIdentifier::new(
1199 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1200 ),
1201 };
1202 assert_eq!(
1203 fake_json_server
1204 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1205 .await,
1206 close_message,
1207 );
1208}
1209
1210#[gpui::test]
1211async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1212 init_test(cx);
1213
1214 let fs = FakeFs::new(cx.executor());
1215 fs.insert_tree(
1216 path!("/the-root"),
1217 json!({
1218 ".gitignore": "target\n",
1219 "Cargo.lock": "",
1220 "src": {
1221 "a.rs": "",
1222 "b.rs": "",
1223 },
1224 "target": {
1225 "x": {
1226 "out": {
1227 "x.rs": ""
1228 }
1229 },
1230 "y": {
1231 "out": {
1232 "y.rs": "",
1233 }
1234 },
1235 "z": {
1236 "out": {
1237 "z.rs": ""
1238 }
1239 }
1240 }
1241 }),
1242 )
1243 .await;
1244 fs.insert_tree(
1245 path!("/the-registry"),
1246 json!({
1247 "dep1": {
1248 "src": {
1249 "dep1.rs": "",
1250 }
1251 },
1252 "dep2": {
1253 "src": {
1254 "dep2.rs": "",
1255 }
1256 },
1257 }),
1258 )
1259 .await;
1260 fs.insert_tree(
1261 path!("/the/stdlib"),
1262 json!({
1263 "LICENSE": "",
1264 "src": {
1265 "string.rs": "",
1266 }
1267 }),
1268 )
1269 .await;
1270
1271 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1272 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1273 (project.languages().clone(), project.lsp_store())
1274 });
1275 language_registry.add(rust_lang());
1276 let mut fake_servers = language_registry.register_fake_lsp(
1277 "Rust",
1278 FakeLspAdapter {
1279 name: "the-language-server",
1280 ..Default::default()
1281 },
1282 );
1283
1284 cx.executor().run_until_parked();
1285
1286 // Start the language server by opening a buffer with a compatible file extension.
1287 project
1288 .update(cx, |project, cx| {
1289 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1290 })
1291 .await
1292 .unwrap();
1293
1294 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1295 project.update(cx, |project, cx| {
1296 let worktree = project.worktrees(cx).next().unwrap();
1297 assert_eq!(
1298 worktree
1299 .read(cx)
1300 .snapshot()
1301 .entries(true, 0)
1302 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1303 .collect::<Vec<_>>(),
1304 &[
1305 ("", false),
1306 (".gitignore", false),
1307 ("Cargo.lock", false),
1308 ("src", false),
1309 ("src/a.rs", false),
1310 ("src/b.rs", false),
1311 ("target", true),
1312 ]
1313 );
1314 });
1315
1316 let prev_read_dir_count = fs.read_dir_call_count();
1317
1318 let fake_server = fake_servers.next().await.unwrap();
1319 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1320 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1321 id
1322 });
1323
1324 // Simulate jumping to a definition in a dependency outside of the worktree.
1325 let _out_of_worktree_buffer = project
1326 .update(cx, |project, cx| {
1327 project.open_local_buffer_via_lsp(
1328 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1329 server_id,
1330 cx,
1331 )
1332 })
1333 .await
1334 .unwrap();
1335
1336 // Keep track of the FS events reported to the language server.
1337 let file_changes = Arc::new(Mutex::new(Vec::new()));
1338 fake_server
1339 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1340 registrations: vec![lsp::Registration {
1341 id: Default::default(),
1342 method: "workspace/didChangeWatchedFiles".to_string(),
1343 register_options: serde_json::to_value(
1344 lsp::DidChangeWatchedFilesRegistrationOptions {
1345 watchers: vec![
1346 lsp::FileSystemWatcher {
1347 glob_pattern: lsp::GlobPattern::String(
1348 path!("/the-root/Cargo.toml").to_string(),
1349 ),
1350 kind: None,
1351 },
1352 lsp::FileSystemWatcher {
1353 glob_pattern: lsp::GlobPattern::String(
1354 path!("/the-root/src/*.{rs,c}").to_string(),
1355 ),
1356 kind: None,
1357 },
1358 lsp::FileSystemWatcher {
1359 glob_pattern: lsp::GlobPattern::String(
1360 path!("/the-root/target/y/**/*.rs").to_string(),
1361 ),
1362 kind: None,
1363 },
1364 lsp::FileSystemWatcher {
1365 glob_pattern: lsp::GlobPattern::String(
1366 path!("/the/stdlib/src/**/*.rs").to_string(),
1367 ),
1368 kind: None,
1369 },
1370 lsp::FileSystemWatcher {
1371 glob_pattern: lsp::GlobPattern::String(
1372 path!("**/Cargo.lock").to_string(),
1373 ),
1374 kind: None,
1375 },
1376 ],
1377 },
1378 )
1379 .ok(),
1380 }],
1381 })
1382 .await
1383 .into_response()
1384 .unwrap();
1385 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1386 let file_changes = file_changes.clone();
1387 move |params, _| {
1388 let mut file_changes = file_changes.lock();
1389 file_changes.extend(params.changes);
1390 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1391 }
1392 });
1393
1394 cx.executor().run_until_parked();
1395 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1396 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1397
1398 let mut new_watched_paths = fs.watched_paths();
1399 new_watched_paths.retain(|path| {
1400 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1401 });
1402 assert_eq!(
1403 &new_watched_paths,
1404 &[
1405 Path::new(path!("/the-root")),
1406 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1407 Path::new(path!("/the/stdlib/src"))
1408 ]
1409 );
1410
1411 // Now the language server has asked us to watch an ignored directory path,
1412 // so we recursively load it.
1413 project.update(cx, |project, cx| {
1414 let worktree = project.visible_worktrees(cx).next().unwrap();
1415 assert_eq!(
1416 worktree
1417 .read(cx)
1418 .snapshot()
1419 .entries(true, 0)
1420 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1421 .collect::<Vec<_>>(),
1422 &[
1423 ("", false),
1424 (".gitignore", false),
1425 ("Cargo.lock", false),
1426 ("src", false),
1427 ("src/a.rs", false),
1428 ("src/b.rs", false),
1429 ("target", true),
1430 ("target/x", true),
1431 ("target/y", true),
1432 ("target/y/out", true),
1433 ("target/y/out/y.rs", true),
1434 ("target/z", true),
1435 ]
1436 );
1437 });
1438
1439 // Perform some file system mutations, two of which match the watched patterns,
1440 // and one of which does not.
1441 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1442 .await
1443 .unwrap();
1444 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1445 .await
1446 .unwrap();
1447 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1448 .await
1449 .unwrap();
1450 fs.create_file(
1451 path!("/the-root/target/x/out/x2.rs").as_ref(),
1452 Default::default(),
1453 )
1454 .await
1455 .unwrap();
1456 fs.create_file(
1457 path!("/the-root/target/y/out/y2.rs").as_ref(),
1458 Default::default(),
1459 )
1460 .await
1461 .unwrap();
1462 fs.save(
1463 path!("/the-root/Cargo.lock").as_ref(),
1464 &"".into(),
1465 Default::default(),
1466 )
1467 .await
1468 .unwrap();
1469 fs.save(
1470 path!("/the-stdlib/LICENSE").as_ref(),
1471 &"".into(),
1472 Default::default(),
1473 )
1474 .await
1475 .unwrap();
1476 fs.save(
1477 path!("/the/stdlib/src/string.rs").as_ref(),
1478 &"".into(),
1479 Default::default(),
1480 )
1481 .await
1482 .unwrap();
1483
1484 // The language server receives events for the FS mutations that match its watch patterns.
1485 cx.executor().run_until_parked();
1486 assert_eq!(
1487 &*file_changes.lock(),
1488 &[
1489 lsp::FileEvent {
1490 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1491 typ: lsp::FileChangeType::CHANGED,
1492 },
1493 lsp::FileEvent {
1494 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1495 typ: lsp::FileChangeType::DELETED,
1496 },
1497 lsp::FileEvent {
1498 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1499 typ: lsp::FileChangeType::CREATED,
1500 },
1501 lsp::FileEvent {
1502 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1503 typ: lsp::FileChangeType::CREATED,
1504 },
1505 lsp::FileEvent {
1506 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1507 typ: lsp::FileChangeType::CHANGED,
1508 },
1509 ]
1510 );
1511}
1512
// Two single-file worktrees (`/dir/a.rs` and `/dir/b.rs`) receive pushed
// diagnostics from the same simulated server (id 0); each buffer should
// surface only the diagnostic published for its own URI, with the expected
// severity.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with each file as its own (single-file) worktree.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one diagnostic per file directly into the LSP store: an ERROR on
    // `a` in a.rs and a WARNING on `b` in b.rs.
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // Each buffer's diagnostic-annotated chunks reflect only its own diagnostic.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1618
1619#[gpui::test]
1620async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1621 init_test(cx);
1622
1623 let fs = FakeFs::new(cx.executor());
1624 fs.insert_tree(
1625 path!("/root"),
1626 json!({
1627 "dir": {
1628 ".git": {
1629 "HEAD": "ref: refs/heads/main",
1630 },
1631 ".gitignore": "b.rs",
1632 "a.rs": "let a = 1;",
1633 "b.rs": "let b = 2;",
1634 },
1635 "other.rs": "let b = c;"
1636 }),
1637 )
1638 .await;
1639
1640 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1641 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1642 let (worktree, _) = project
1643 .update(cx, |project, cx| {
1644 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1645 })
1646 .await
1647 .unwrap();
1648 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1649
1650 let (worktree, _) = project
1651 .update(cx, |project, cx| {
1652 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1653 })
1654 .await
1655 .unwrap();
1656 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1657
1658 let server_id = LanguageServerId(0);
1659 lsp_store.update(cx, |lsp_store, cx| {
1660 lsp_store
1661 .update_diagnostics(
1662 server_id,
1663 lsp::PublishDiagnosticsParams {
1664 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1665 version: None,
1666 diagnostics: vec![lsp::Diagnostic {
1667 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1668 severity: Some(lsp::DiagnosticSeverity::ERROR),
1669 message: "unused variable 'b'".to_string(),
1670 ..Default::default()
1671 }],
1672 },
1673 None,
1674 DiagnosticSourceKind::Pushed,
1675 &[],
1676 cx,
1677 )
1678 .unwrap();
1679 lsp_store
1680 .update_diagnostics(
1681 server_id,
1682 lsp::PublishDiagnosticsParams {
1683 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1684 version: None,
1685 diagnostics: vec![lsp::Diagnostic {
1686 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1687 severity: Some(lsp::DiagnosticSeverity::ERROR),
1688 message: "unknown variable 'c'".to_string(),
1689 ..Default::default()
1690 }],
1691 },
1692 None,
1693 DiagnosticSourceKind::Pushed,
1694 &[],
1695 cx,
1696 )
1697 .unwrap();
1698 });
1699
1700 let main_ignored_buffer = project
1701 .update(cx, |project, cx| {
1702 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1703 })
1704 .await
1705 .unwrap();
1706 main_ignored_buffer.update(cx, |buffer, _| {
1707 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1708 assert_eq!(
1709 chunks
1710 .iter()
1711 .map(|(s, d)| (s.as_str(), *d))
1712 .collect::<Vec<_>>(),
1713 &[
1714 ("let ", None),
1715 ("b", Some(DiagnosticSeverity::ERROR)),
1716 (" = 2;", None),
1717 ],
1718 "Gigitnored buffers should still get in-buffer diagnostics",
1719 );
1720 });
1721 let other_buffer = project
1722 .update(cx, |project, cx| {
1723 project.open_buffer((other_worktree_id, rel_path("")), cx)
1724 })
1725 .await
1726 .unwrap();
1727 other_buffer.update(cx, |buffer, _| {
1728 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1729 assert_eq!(
1730 chunks
1731 .iter()
1732 .map(|(s, d)| (s.as_str(), *d))
1733 .collect::<Vec<_>>(),
1734 &[
1735 ("let b = ", None),
1736 ("c", Some(DiagnosticSeverity::ERROR)),
1737 (";", None),
1738 ],
1739 "Buffers from hidden projects should still get in-buffer diagnostics"
1740 );
1741 });
1742
1743 project.update(cx, |project, cx| {
1744 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1745 assert_eq!(
1746 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1747 vec![(
1748 ProjectPath {
1749 worktree_id: main_worktree_id,
1750 path: rel_path("b.rs").into(),
1751 },
1752 server_id,
1753 DiagnosticSummary {
1754 error_count: 1,
1755 warning_count: 0,
1756 }
1757 )]
1758 );
1759 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1760 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1761 });
1762}
1763
// Exercises the disk-based-diagnostics progress protocol: a server whose
// progress token matches `disk_based_diagnostics_progress_token` should emit
// DiskBasedDiagnosticsStarted/Finished project events bracketing the
// diagnostics, and publishing identical empty diagnostics twice should
// produce only one DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress with the disk-based token triggers the "started" event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress triggers the "finished" event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The pushed diagnostic is visible in the buffer snapshot.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No second event: the empty-diagnostics publish was a no-op.
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1903
// Restarting a language server while its disk-based diagnostics progress is
// still open must not leave the project stuck in a "diagnostics running"
// state: the replacement server's progress lifecycle supersedes the old one,
// and the expected project events fire in order for the new server id (1).
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server (id 1) is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2007
// Diagnostics already published by a language server are cleared (both from
// the buffer and from the project-wide summary) when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible in the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2088
2089#[gpui::test]
2090async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2091 init_test(cx);
2092
2093 let fs = FakeFs::new(cx.executor());
2094 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2095
2096 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2097 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2098
2099 language_registry.add(rust_lang());
2100 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2101
2102 let (buffer, _handle) = project
2103 .update(cx, |project, cx| {
2104 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2105 })
2106 .await
2107 .unwrap();
2108
2109 // Before restarting the server, report diagnostics with an unknown buffer version.
2110 let fake_server = fake_servers.next().await.unwrap();
2111 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2112 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2113 version: Some(10000),
2114 diagnostics: Vec::new(),
2115 });
2116 cx.executor().run_until_parked();
2117 project.update(cx, |project, cx| {
2118 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2119 });
2120
2121 let mut fake_server = fake_servers.next().await.unwrap();
2122 let notification = fake_server
2123 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2124 .await
2125 .text_document;
2126 assert_eq!(notification.version, 0);
2127}
2128
// Cancelling language-server work for a buffer should send a
// WorkDoneProgressCancel notification only for progress tokens that were
// started as cancellable — the non-cancellable "another-token" must be left
// alone.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // One non-cancellable token and one cancellable token are in flight.
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token is cancelled.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2193
// Toggling the per-language `enable_language_server` setting stops and starts
// only the matching server: disabling Rust exits only the rust server;
// re-enabling Rust while disabling JavaScript starts a fresh rust server and
// exits the js server.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // A brand-new rust server instance re-opens the rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2311
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies that push diagnostics published against an *older* buffer
    // version are mapped through subsequent edits: ranges shift with the text,
    // overlapping diagnostics are highlighted correctly, and ranges arriving
    // out of order are still anchored properly.
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" is registered as a disk-based source so published diagnostics get
    // `is_disk_based: true` below.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Diagnostics were published at rows 0-2 but must now appear at rows
        // 2-4 after the two inserted leading newlines.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        // A range that starts/ends inside diagnostics clips the highlighted chunks.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider (warning) range sorts before the narrower (error) range
        // that starts at the same position.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Within the overlap, the more severe (error) style wins.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), " ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Despite arriving out of order, diagnostics come back sorted by
        // position, mapped through the post-publish edits above.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2603
2604#[gpui::test]
2605async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2606 init_test(cx);
2607
2608 let text = concat!(
2609 "let one = ;\n", //
2610 "let two = \n",
2611 "let three = 3;\n",
2612 );
2613
2614 let fs = FakeFs::new(cx.executor());
2615 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2616
2617 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2618 let buffer = project
2619 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2620 .await
2621 .unwrap();
2622
2623 project.update(cx, |project, cx| {
2624 project.lsp_store.update(cx, |lsp_store, cx| {
2625 lsp_store
2626 .update_diagnostic_entries(
2627 LanguageServerId(0),
2628 PathBuf::from("/dir/a.rs"),
2629 None,
2630 None,
2631 vec![
2632 DiagnosticEntry {
2633 range: Unclipped(PointUtf16::new(0, 10))
2634 ..Unclipped(PointUtf16::new(0, 10)),
2635 diagnostic: Diagnostic {
2636 severity: DiagnosticSeverity::ERROR,
2637 message: "syntax error 1".to_string(),
2638 source_kind: DiagnosticSourceKind::Pushed,
2639 ..Diagnostic::default()
2640 },
2641 },
2642 DiagnosticEntry {
2643 range: Unclipped(PointUtf16::new(1, 10))
2644 ..Unclipped(PointUtf16::new(1, 10)),
2645 diagnostic: Diagnostic {
2646 severity: DiagnosticSeverity::ERROR,
2647 message: "syntax error 2".to_string(),
2648 source_kind: DiagnosticSourceKind::Pushed,
2649 ..Diagnostic::default()
2650 },
2651 },
2652 ],
2653 cx,
2654 )
2655 .unwrap();
2656 })
2657 });
2658
2659 // An empty range is extended forward to include the following character.
2660 // At the end of a line, an empty range is extended backward to include
2661 // the preceding character.
2662 buffer.update(cx, |buffer, _| {
2663 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2664 assert_eq!(
2665 chunks
2666 .iter()
2667 .map(|(s, d)| (s.as_str(), *d))
2668 .collect::<Vec<_>>(),
2669 &[
2670 ("let one = ", None),
2671 (";", Some(DiagnosticSeverity::ERROR)),
2672 ("\nlet two =", None),
2673 (" ", Some(DiagnosticSeverity::ERROR)),
2674 ("\nlet three = 3;\n", None)
2675 ]
2676 );
2677 });
2678}
2679
2680#[gpui::test]
2681async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2682 init_test(cx);
2683
2684 let fs = FakeFs::new(cx.executor());
2685 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2686 .await;
2687
2688 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2689 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2690
2691 lsp_store.update(cx, |lsp_store, cx| {
2692 lsp_store
2693 .update_diagnostic_entries(
2694 LanguageServerId(0),
2695 Path::new("/dir/a.rs").to_owned(),
2696 None,
2697 None,
2698 vec![DiagnosticEntry {
2699 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2700 diagnostic: Diagnostic {
2701 severity: DiagnosticSeverity::ERROR,
2702 is_primary: true,
2703 message: "syntax error a1".to_string(),
2704 source_kind: DiagnosticSourceKind::Pushed,
2705 ..Diagnostic::default()
2706 },
2707 }],
2708 cx,
2709 )
2710 .unwrap();
2711 lsp_store
2712 .update_diagnostic_entries(
2713 LanguageServerId(1),
2714 Path::new("/dir/a.rs").to_owned(),
2715 None,
2716 None,
2717 vec![DiagnosticEntry {
2718 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2719 diagnostic: Diagnostic {
2720 severity: DiagnosticSeverity::ERROR,
2721 is_primary: true,
2722 message: "syntax error b1".to_string(),
2723 source_kind: DiagnosticSourceKind::Pushed,
2724 ..Diagnostic::default()
2725 },
2726 }],
2727 cx,
2728 )
2729 .unwrap();
2730
2731 assert_eq!(
2732 lsp_store.diagnostic_summary(false, cx),
2733 DiagnosticSummary {
2734 error_count: 2,
2735 warning_count: 0,
2736 }
2737 );
2738 });
2739}
2740
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` interprets edits against the *older*
    // document version the server computed them for, translating them through
    // the buffer edits made since that version.
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server will later claim its edits were based on.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // The LSP ranges below are relative to `lsp_document_version`, i.e. the
    // buffer *before* the comment insertions above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the translated edits must preserve the user's interleaved
    // comments while landing the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2895
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    // Verifies that a sprawling server diff which rewrites most of the file is
    // minimized by `edits_from_lsp` down to the two small edits it actually
    // represents.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four-edit diff collapses into one replacement plus one deletion.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3006
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    // Some servers send an insertion *after* a replacement starting at the
    // same position, which violates the LSP ordering rules. Verifies that
    // `edits_from_lsp` still applies both edits sensibly.
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion lands before the (no-op) replacement rather than being dropped.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3062
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    // Verifies that `edits_from_lsp` tolerates malformed server edits:
    // unsorted edits, inverted ranges (end before start), and ranges pointing
    // past the end of the document are all normalized/clipped.
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0, 8) comes after end (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (line 99) lies beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimized result as the well-formed variant of this diff.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3169
3170fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3171 buffer: &Buffer,
3172 range: Range<T>,
3173) -> Vec<(String, Option<DiagnosticSeverity>)> {
3174 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3175 for chunk in buffer.snapshot().chunks(range, true) {
3176 if chunks
3177 .last()
3178 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3179 {
3180 chunks.last_mut().unwrap().0.push_str(chunk.text);
3181 } else {
3182 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3183 }
3184 }
3185 chunks
3186}
3187
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    // Verifies go-to-definition into a file outside the project: the target
    // buffer is opened in a temporary invisible worktree that is dropped once
    // the definition is no longer referenced, and no extra server is started.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs lives outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Point at the `A` inside a.rs, which is outside the project root.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs gets an invisible worktree while the definition is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's root path along with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3286
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    // Verifies that when a completion item carries a `text_edit`, its range
    // and new text take precedence over both `insert_text` and `label`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Request completions at the end of the buffer before installing the
    // handler; the handler answers this in-flight request.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // The edit covers the trailing "fqn" (last 3 characters).
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3369
// Verifies that completion items without their own `text_edit` fall back to
// the list-level `itemDefaults.editRange` for the replace range, and that
// `new_text` comes from `insert_text` when present, otherwise from `label`.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Register a fake TypeScript language server that advertises completion support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request; it is answered by the handler
        // installed below.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // `.next().await` blocks until the fake server has served one request.
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covering the trailing "fqn".
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` wins over `label`, and the list-default edit range is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With neither `text_edit` nor `insert_text`, the `label` is inserted.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3505
// Verifies completion behavior when neither the items nor the list defaults
// provide an edit range: the replace range is derived from the buffer text
// around the cursor, and `new_text` falls back to `insert_text`, then `label`.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Register a fake TypeScript language server that advertises completion support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // `.next().await` blocks until the fake server has served one request.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred replace range covers the trailing "fqn" before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // The cursor sits just before the closing quote (text.len() - 1).
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no `insert_text`, the `label` is used as the inserted text.
    assert_eq!(completions[0].new_text, "component");
    // The inferred replace range covers "cmp" inside the string literal.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3611
// Verifies that carriage returns in server-provided completion text are
// normalized: "fully\rQualified\r\nName" in `insert_text` becomes
// "fully\nQualified\nName" in the resulting completion's `new_text`.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Register a fake TypeScript language server that advertises completion support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with insert_text containing both a bare "\r" and a "\r\n";
    // `.next().await` blocks until the fake server has served the request.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" are normalized to "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3679
// End-to-end test of code actions backed by commands rather than edits:
// resolving the action attaches a command, executing the command makes the
// fake server send a `workspace/applyEdit` request back to the client, and
// those edits end up in the project transaction returned by
// `apply_code_action` (and can be undone in the buffer).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // Register a fake server that supports resolvable code actions and a
    // single executable command, "_the/command".
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    // `.next().await` blocks until the fake server has served the request.
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3821
// Renaming a file into a not-yet-existing directory hierarchy must create all
// intermediate directories and preserve the file's contents; a subsequent
// rename into an already-existing directory must also succeed.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    // Look up the worktree and the entry id of the file to be renamed.
    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());

    // Move the file into a directory chain that does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // The entry id changes location; re-resolve it at the new path.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move the file up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3929
3930#[gpui::test(iterations = 10)]
3931async fn test_save_file(cx: &mut gpui::TestAppContext) {
3932 init_test(cx);
3933
3934 let fs = FakeFs::new(cx.executor());
3935 fs.insert_tree(
3936 path!("/dir"),
3937 json!({
3938 "file1": "the old contents",
3939 }),
3940 )
3941 .await;
3942
3943 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3944 let buffer = project
3945 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3946 .await
3947 .unwrap();
3948 buffer.update(cx, |buffer, cx| {
3949 assert_eq!(buffer.text(), "the old contents");
3950 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3951 });
3952
3953 project
3954 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3955 .await
3956 .unwrap();
3957
3958 let new_text = fs
3959 .load(Path::new(path!("/dir/file1")))
3960 .await
3961 .unwrap()
3962 .replace("\r\n", "\n");
3963 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3964}
3965
// Regression test for issue #24349: saving an untitled buffer under a name
// with a recognized extension ("file.rs") must start the matching language
// server, notify it of the newly-opened document, and associate the buffer
// with that server.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Register a fake Rust server; the stream yields a handle once the server
    // is actually started.
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file (and thus no language), no
    // language server is associated with it yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a ".rs" name inside the sole worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now associated with the newly-started server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4045
4046#[gpui::test(iterations = 30)]
4047async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4048 init_test(cx);
4049
4050 let fs = FakeFs::new(cx.executor());
4051 fs.insert_tree(
4052 path!("/dir"),
4053 json!({
4054 "file1": "the original contents",
4055 }),
4056 )
4057 .await;
4058
4059 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4060 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4061 let buffer = project
4062 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4063 .await
4064 .unwrap();
4065
4066 // Simulate buffer diffs being slow, so that they don't complete before
4067 // the next file change occurs.
4068 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4069
4070 // Change the buffer's file on disk, and then wait for the file change
4071 // to be detected by the worktree, so that the buffer starts reloading.
4072 fs.save(
4073 path!("/dir/file1").as_ref(),
4074 &"the first contents".into(),
4075 Default::default(),
4076 )
4077 .await
4078 .unwrap();
4079 worktree.next_event(cx).await;
4080
4081 // Change the buffer's file again. Depending on the random seed, the
4082 // previous file change may still be in progress.
4083 fs.save(
4084 path!("/dir/file1").as_ref(),
4085 &"the second contents".into(),
4086 Default::default(),
4087 )
4088 .await
4089 .unwrap();
4090 worktree.next_event(cx).await;
4091
4092 cx.executor().run_until_parked();
4093 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4094 buffer.read_with(cx, |buffer, _| {
4095 assert_eq!(buffer.text(), on_disk_text);
4096 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4097 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4098 });
4099}
4100
4101#[gpui::test(iterations = 30)]
4102async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4103 init_test(cx);
4104
4105 let fs = FakeFs::new(cx.executor());
4106 fs.insert_tree(
4107 path!("/dir"),
4108 json!({
4109 "file1": "the original contents",
4110 }),
4111 )
4112 .await;
4113
4114 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4115 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4116 let buffer = project
4117 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4118 .await
4119 .unwrap();
4120
4121 // Simulate buffer diffs being slow, so that they don't complete before
4122 // the next file change occurs.
4123 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4124
4125 // Change the buffer's file on disk, and then wait for the file change
4126 // to be detected by the worktree, so that the buffer starts reloading.
4127 fs.save(
4128 path!("/dir/file1").as_ref(),
4129 &"the first contents".into(),
4130 Default::default(),
4131 )
4132 .await
4133 .unwrap();
4134 worktree.next_event(cx).await;
4135
4136 cx.executor()
4137 .spawn(cx.executor().simulate_random_delay())
4138 .await;
4139
4140 // Perform a noop edit, causing the buffer's version to increase.
4141 buffer.update(cx, |buffer, cx| {
4142 buffer.edit([(0..0, " ")], None, cx);
4143 buffer.undo(cx);
4144 });
4145
4146 cx.executor().run_until_parked();
4147 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4148 buffer.read_with(cx, |buffer, _| {
4149 let buffer_text = buffer.text();
4150 if buffer_text == on_disk_text {
4151 assert!(
4152 !buffer.is_dirty() && !buffer.has_conflict(),
4153 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4154 );
4155 }
4156 // If the file change occurred while the buffer was processing the first
4157 // change, the buffer will be in a conflicting state.
4158 else {
4159 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4160 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4161 }
4162 });
4163}
4164
4165#[gpui::test]
4166async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4167 init_test(cx);
4168
4169 let fs = FakeFs::new(cx.executor());
4170 fs.insert_tree(
4171 path!("/dir"),
4172 json!({
4173 "file1": "the old contents",
4174 }),
4175 )
4176 .await;
4177
4178 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4179 let buffer = project
4180 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4181 .await
4182 .unwrap();
4183 buffer.update(cx, |buffer, cx| {
4184 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4185 });
4186
4187 project
4188 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4189 .await
4190 .unwrap();
4191
4192 let new_text = fs
4193 .load(Path::new(path!("/dir/file1")))
4194 .await
4195 .unwrap()
4196 .replace("\r\n", "\n");
4197 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4198}
4199
4200#[gpui::test]
4201async fn test_save_as(cx: &mut gpui::TestAppContext) {
4202 init_test(cx);
4203
4204 let fs = FakeFs::new(cx.executor());
4205 fs.insert_tree("/dir", json!({})).await;
4206
4207 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4208
4209 let languages = project.update(cx, |project, _| project.languages().clone());
4210 languages.add(rust_lang());
4211
4212 let buffer = project.update(cx, |project, cx| {
4213 project.create_local_buffer("", None, false, cx)
4214 });
4215 buffer.update(cx, |buffer, cx| {
4216 buffer.edit([(0..0, "abc")], None, cx);
4217 assert!(buffer.is_dirty());
4218 assert!(!buffer.has_conflict());
4219 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4220 });
4221 project
4222 .update(cx, |project, cx| {
4223 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4224 let path = ProjectPath {
4225 worktree_id,
4226 path: rel_path("file1.rs").into(),
4227 };
4228 project.save_buffer_as(buffer.clone(), path, cx)
4229 })
4230 .await
4231 .unwrap();
4232 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4233
4234 cx.executor().run_until_parked();
4235 buffer.update(cx, |buffer, cx| {
4236 assert_eq!(
4237 buffer.file().unwrap().full_path(cx),
4238 Path::new("dir/file1.rs")
4239 );
4240 assert!(!buffer.is_dirty());
4241 assert!(!buffer.has_conflict());
4242 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4243 });
4244
4245 let opened_buffer = project
4246 .update(cx, |project, cx| {
4247 project.open_local_buffer("/dir/file1.rs", cx)
4248 })
4249 .await
4250 .unwrap();
4251 assert_eq!(opened_buffer, buffer);
4252}
4253
// Exercises a real (non-fake) filesystem: after renaming and deleting files
// and directories on disk, the local worktree rescans to the new state, open
// buffers track their files' new paths (or deleted state), entry ids are
// preserved across renames, and a remote replica of the worktree converges
// to the same paths after applying the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS test: allow blocking operations on the test executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    // Helpers to open a buffer for a path and to fetch a path's entry id.
    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    // The local worktree now reflects the new on-disk layout.
    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids are stable across the renames.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    // Open buffers follow their files; the deleted file's buffer reports
    // `DiskState::Deleted` while keeping its last known path.
    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4421
4422#[gpui::test(iterations = 10)]
4423async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
4424 init_test(cx);
4425
4426 let fs = FakeFs::new(cx.executor());
4427 fs.insert_tree(
4428 path!("/dir"),
4429 json!({
4430 "a": {
4431 "file1": "",
4432 }
4433 }),
4434 )
4435 .await;
4436
4437 let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
4438 let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
4439 let tree_id = tree.update(cx, |tree, _| tree.id());
4440
4441 let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
4442 project.update(cx, |project, cx| {
4443 let tree = project.worktrees(cx).next().unwrap();
4444 tree.read(cx)
4445 .entry_for_path(rel_path(path))
4446 .unwrap_or_else(|| panic!("no entry for path {}", path))
4447 .id
4448 })
4449 };
4450
4451 let dir_id = id_for_path("a", cx);
4452 let file_id = id_for_path("a/file1", cx);
4453 let buffer = project
4454 .update(cx, |p, cx| {
4455 p.open_buffer((tree_id, rel_path("a/file1")), cx)
4456 })
4457 .await
4458 .unwrap();
4459 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4460
4461 project
4462 .update(cx, |project, cx| {
4463 project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
4464 })
4465 .unwrap()
4466 .await
4467 .into_included()
4468 .unwrap();
4469 cx.executor().run_until_parked();
4470
4471 assert_eq!(id_for_path("b", cx), dir_id);
4472 assert_eq!(id_for_path("b/file1", cx), file_id);
4473 buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4474}
4475
4476#[gpui::test]
4477async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4478 init_test(cx);
4479
4480 let fs = FakeFs::new(cx.executor());
4481 fs.insert_tree(
4482 "/dir",
4483 json!({
4484 "a.txt": "a-contents",
4485 "b.txt": "b-contents",
4486 }),
4487 )
4488 .await;
4489
4490 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4491
4492 // Spawn multiple tasks to open paths, repeating some paths.
4493 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4494 (
4495 p.open_local_buffer("/dir/a.txt", cx),
4496 p.open_local_buffer("/dir/b.txt", cx),
4497 p.open_local_buffer("/dir/a.txt", cx),
4498 )
4499 });
4500
4501 let buffer_a_1 = buffer_a_1.await.unwrap();
4502 let buffer_a_2 = buffer_a_2.await.unwrap();
4503 let buffer_b = buffer_b.await.unwrap();
4504 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4505 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4506
4507 // There is only one buffer per path.
4508 let buffer_a_id = buffer_a_1.entity_id();
4509 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4510
4511 // Open the same path again while it is still open.
4512 drop(buffer_a_1);
4513 let buffer_a_3 = project
4514 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4515 .await
4516 .unwrap();
4517
4518 // There's still only one buffer per path.
4519 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4520}
4521
4522#[gpui::test]
4523async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4524 init_test(cx);
4525
4526 let fs = FakeFs::new(cx.executor());
4527 fs.insert_tree(
4528 path!("/dir"),
4529 json!({
4530 "file1": "abc",
4531 "file2": "def",
4532 "file3": "ghi",
4533 }),
4534 )
4535 .await;
4536
4537 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4538
4539 let buffer1 = project
4540 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4541 .await
4542 .unwrap();
4543 let events = Arc::new(Mutex::new(Vec::new()));
4544
4545 // initially, the buffer isn't dirty.
4546 buffer1.update(cx, |buffer, cx| {
4547 cx.subscribe(&buffer1, {
4548 let events = events.clone();
4549 move |_, _, event, _| match event {
4550 BufferEvent::Operation { .. } => {}
4551 _ => events.lock().push(event.clone()),
4552 }
4553 })
4554 .detach();
4555
4556 assert!(!buffer.is_dirty());
4557 assert!(events.lock().is_empty());
4558
4559 buffer.edit([(1..2, "")], None, cx);
4560 });
4561
4562 // after the first edit, the buffer is dirty, and emits a dirtied event.
4563 buffer1.update(cx, |buffer, cx| {
4564 assert!(buffer.text() == "ac");
4565 assert!(buffer.is_dirty());
4566 assert_eq!(
4567 *events.lock(),
4568 &[
4569 language::BufferEvent::Edited,
4570 language::BufferEvent::DirtyChanged
4571 ]
4572 );
4573 events.lock().clear();
4574 buffer.did_save(
4575 buffer.version(),
4576 buffer.file().unwrap().disk_state().mtime(),
4577 cx,
4578 );
4579 });
4580
4581 // after saving, the buffer is not dirty, and emits a saved event.
4582 buffer1.update(cx, |buffer, cx| {
4583 assert!(!buffer.is_dirty());
4584 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4585 events.lock().clear();
4586
4587 buffer.edit([(1..1, "B")], None, cx);
4588 buffer.edit([(2..2, "D")], None, cx);
4589 });
4590
4591 // after editing again, the buffer is dirty, and emits another dirty event.
4592 buffer1.update(cx, |buffer, cx| {
4593 assert!(buffer.text() == "aBDc");
4594 assert!(buffer.is_dirty());
4595 assert_eq!(
4596 *events.lock(),
4597 &[
4598 language::BufferEvent::Edited,
4599 language::BufferEvent::DirtyChanged,
4600 language::BufferEvent::Edited,
4601 ],
4602 );
4603 events.lock().clear();
4604
4605 // After restoring the buffer to its previously-saved state,
4606 // the buffer is not considered dirty anymore.
4607 buffer.edit([(1..3, "")], None, cx);
4608 assert!(buffer.text() == "ac");
4609 assert!(!buffer.is_dirty());
4610 });
4611
4612 assert_eq!(
4613 *events.lock(),
4614 &[
4615 language::BufferEvent::Edited,
4616 language::BufferEvent::DirtyChanged
4617 ]
4618 );
4619
4620 // When a file is deleted, it is not considered dirty.
4621 let events = Arc::new(Mutex::new(Vec::new()));
4622 let buffer2 = project
4623 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4624 .await
4625 .unwrap();
4626 buffer2.update(cx, |_, cx| {
4627 cx.subscribe(&buffer2, {
4628 let events = events.clone();
4629 move |_, _, event, _| match event {
4630 BufferEvent::Operation { .. } => {}
4631 _ => events.lock().push(event.clone()),
4632 }
4633 })
4634 .detach();
4635 });
4636
4637 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4638 .await
4639 .unwrap();
4640 cx.executor().run_until_parked();
4641 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4642 assert_eq!(
4643 mem::take(&mut *events.lock()),
4644 &[language::BufferEvent::FileHandleChanged]
4645 );
4646
4647 // Buffer becomes dirty when edited.
4648 buffer2.update(cx, |buffer, cx| {
4649 buffer.edit([(2..3, "")], None, cx);
4650 assert_eq!(buffer.is_dirty(), true);
4651 });
4652 assert_eq!(
4653 mem::take(&mut *events.lock()),
4654 &[
4655 language::BufferEvent::Edited,
4656 language::BufferEvent::DirtyChanged
4657 ]
4658 );
4659
4660 // Buffer becomes clean again when all of its content is removed, because
4661 // the file was deleted.
4662 buffer2.update(cx, |buffer, cx| {
4663 buffer.edit([(0..2, "")], None, cx);
4664 assert_eq!(buffer.is_empty(), true);
4665 assert_eq!(buffer.is_dirty(), false);
4666 });
4667 assert_eq!(
4668 *events.lock(),
4669 &[
4670 language::BufferEvent::Edited,
4671 language::BufferEvent::DirtyChanged
4672 ]
4673 );
4674
4675 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4676 let events = Arc::new(Mutex::new(Vec::new()));
4677 let buffer3 = project
4678 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4679 .await
4680 .unwrap();
4681 buffer3.update(cx, |_, cx| {
4682 cx.subscribe(&buffer3, {
4683 let events = events.clone();
4684 move |_, _, event, _| match event {
4685 BufferEvent::Operation { .. } => {}
4686 _ => events.lock().push(event.clone()),
4687 }
4688 })
4689 .detach();
4690 });
4691
4692 buffer3.update(cx, |buffer, cx| {
4693 buffer.edit([(0..0, "x")], None, cx);
4694 });
4695 events.lock().clear();
4696 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4697 .await
4698 .unwrap();
4699 cx.executor().run_until_parked();
4700 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4701 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4702}
4703
4704#[gpui::test]
4705async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
4706 init_test(cx);
4707
4708 let (initial_contents, initial_offsets) =
4709 marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
4710 let fs = FakeFs::new(cx.executor());
4711 fs.insert_tree(
4712 path!("/dir"),
4713 json!({
4714 "the-file": initial_contents,
4715 }),
4716 )
4717 .await;
4718 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4719 let buffer = project
4720 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
4721 .await
4722 .unwrap();
4723
4724 let anchors = initial_offsets
4725 .iter()
4726 .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
4727 .collect::<Vec<_>>();
4728
4729 // Change the file on disk, adding two new lines of text, and removing
4730 // one line.
4731 buffer.update(cx, |buffer, _| {
4732 assert!(!buffer.is_dirty());
4733 assert!(!buffer.has_conflict());
4734 });
4735
4736 let (new_contents, new_offsets) =
4737 marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
4738 fs.save(
4739 path!("/dir/the-file").as_ref(),
4740 &new_contents.as_str().into(),
4741 LineEnding::Unix,
4742 )
4743 .await
4744 .unwrap();
4745
4746 // Because the buffer was not modified, it is reloaded from disk. Its
4747 // contents are edited according to the diff between the old and new
4748 // file contents.
4749 cx.executor().run_until_parked();
4750 buffer.update(cx, |buffer, _| {
4751 assert_eq!(buffer.text(), new_contents);
4752 assert!(!buffer.is_dirty());
4753 assert!(!buffer.has_conflict());
4754
4755 let anchor_offsets = anchors
4756 .iter()
4757 .map(|anchor| anchor.to_offset(&*buffer))
4758 .collect::<Vec<_>>();
4759 assert_eq!(anchor_offsets, new_offsets);
4760 });
4761
4762 // Modify the buffer
4763 buffer.update(cx, |buffer, cx| {
4764 buffer.edit([(0..0, " ")], None, cx);
4765 assert!(buffer.is_dirty());
4766 assert!(!buffer.has_conflict());
4767 });
4768
4769 // Change the file on disk again, adding blank lines to the beginning.
4770 fs.save(
4771 path!("/dir/the-file").as_ref(),
4772 &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
4773 LineEnding::Unix,
4774 )
4775 .await
4776 .unwrap();
4777
4778 // Because the buffer is modified, it doesn't reload from disk, but is
4779 // marked as having a conflict.
4780 cx.executor().run_until_parked();
4781 buffer.update(cx, |buffer, _| {
4782 assert_eq!(buffer.text(), " ".to_string() + &new_contents);
4783 assert!(buffer.has_conflict());
4784 });
4785}
4786
4787#[gpui::test]
4788async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4789 init_test(cx);
4790
4791 let fs = FakeFs::new(cx.executor());
4792 fs.insert_tree(
4793 path!("/dir"),
4794 json!({
4795 "file1": "a\nb\nc\n",
4796 "file2": "one\r\ntwo\r\nthree\r\n",
4797 }),
4798 )
4799 .await;
4800
4801 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4802 let buffer1 = project
4803 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4804 .await
4805 .unwrap();
4806 let buffer2 = project
4807 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4808 .await
4809 .unwrap();
4810
4811 buffer1.update(cx, |buffer, _| {
4812 assert_eq!(buffer.text(), "a\nb\nc\n");
4813 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4814 });
4815 buffer2.update(cx, |buffer, _| {
4816 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4817 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4818 });
4819
4820 // Change a file's line endings on disk from unix to windows. The buffer's
4821 // state updates correctly.
4822 fs.save(
4823 path!("/dir/file1").as_ref(),
4824 &"aaa\nb\nc\n".into(),
4825 LineEnding::Windows,
4826 )
4827 .await
4828 .unwrap();
4829 cx.executor().run_until_parked();
4830 buffer1.update(cx, |buffer, _| {
4831 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4832 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4833 });
4834
4835 // Save a file with windows line endings. The file is written correctly.
4836 buffer2.update(cx, |buffer, cx| {
4837 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4838 });
4839 project
4840 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4841 .await
4842 .unwrap();
4843 assert_eq!(
4844 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4845 "one\r\ntwo\r\nthree\r\nfour\r\n",
4846 );
4847}
4848
4849#[gpui::test]
4850async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
4851 init_test(cx);
4852
4853 let fs = FakeFs::new(cx.executor());
4854 fs.insert_tree(
4855 path!("/dir"),
4856 json!({
4857 "a.rs": "
4858 fn foo(mut v: Vec<usize>) {
4859 for x in &v {
4860 v.push(1);
4861 }
4862 }
4863 "
4864 .unindent(),
4865 }),
4866 )
4867 .await;
4868
4869 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4870 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
4871 let buffer = project
4872 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
4873 .await
4874 .unwrap();
4875
4876 let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
4877 let message = lsp::PublishDiagnosticsParams {
4878 uri: buffer_uri.clone(),
4879 diagnostics: vec![
4880 lsp::Diagnostic {
4881 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4882 severity: Some(DiagnosticSeverity::WARNING),
4883 message: "error 1".to_string(),
4884 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4885 location: lsp::Location {
4886 uri: buffer_uri.clone(),
4887 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4888 },
4889 message: "error 1 hint 1".to_string(),
4890 }]),
4891 ..Default::default()
4892 },
4893 lsp::Diagnostic {
4894 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4895 severity: Some(DiagnosticSeverity::HINT),
4896 message: "error 1 hint 1".to_string(),
4897 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4898 location: lsp::Location {
4899 uri: buffer_uri.clone(),
4900 range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
4901 },
4902 message: "original diagnostic".to_string(),
4903 }]),
4904 ..Default::default()
4905 },
4906 lsp::Diagnostic {
4907 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4908 severity: Some(DiagnosticSeverity::ERROR),
4909 message: "error 2".to_string(),
4910 related_information: Some(vec![
4911 lsp::DiagnosticRelatedInformation {
4912 location: lsp::Location {
4913 uri: buffer_uri.clone(),
4914 range: lsp::Range::new(
4915 lsp::Position::new(1, 13),
4916 lsp::Position::new(1, 15),
4917 ),
4918 },
4919 message: "error 2 hint 1".to_string(),
4920 },
4921 lsp::DiagnosticRelatedInformation {
4922 location: lsp::Location {
4923 uri: buffer_uri.clone(),
4924 range: lsp::Range::new(
4925 lsp::Position::new(1, 13),
4926 lsp::Position::new(1, 15),
4927 ),
4928 },
4929 message: "error 2 hint 2".to_string(),
4930 },
4931 ]),
4932 ..Default::default()
4933 },
4934 lsp::Diagnostic {
4935 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4936 severity: Some(DiagnosticSeverity::HINT),
4937 message: "error 2 hint 1".to_string(),
4938 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4939 location: lsp::Location {
4940 uri: buffer_uri.clone(),
4941 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4942 },
4943 message: "original diagnostic".to_string(),
4944 }]),
4945 ..Default::default()
4946 },
4947 lsp::Diagnostic {
4948 range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
4949 severity: Some(DiagnosticSeverity::HINT),
4950 message: "error 2 hint 2".to_string(),
4951 related_information: Some(vec![lsp::DiagnosticRelatedInformation {
4952 location: lsp::Location {
4953 uri: buffer_uri,
4954 range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
4955 },
4956 message: "original diagnostic".to_string(),
4957 }]),
4958 ..Default::default()
4959 },
4960 ],
4961 version: None,
4962 };
4963
4964 lsp_store
4965 .update(cx, |lsp_store, cx| {
4966 lsp_store.update_diagnostics(
4967 LanguageServerId(0),
4968 message,
4969 None,
4970 DiagnosticSourceKind::Pushed,
4971 &[],
4972 cx,
4973 )
4974 })
4975 .unwrap();
4976 let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());
4977
4978 assert_eq!(
4979 buffer
4980 .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
4981 .collect::<Vec<_>>(),
4982 &[
4983 DiagnosticEntry {
4984 range: Point::new(1, 8)..Point::new(1, 9),
4985 diagnostic: Diagnostic {
4986 severity: DiagnosticSeverity::WARNING,
4987 message: "error 1".to_string(),
4988 group_id: 1,
4989 is_primary: true,
4990 source_kind: DiagnosticSourceKind::Pushed,
4991 ..Diagnostic::default()
4992 }
4993 },
4994 DiagnosticEntry {
4995 range: Point::new(1, 8)..Point::new(1, 9),
4996 diagnostic: Diagnostic {
4997 severity: DiagnosticSeverity::HINT,
4998 message: "error 1 hint 1".to_string(),
4999 group_id: 1,
5000 is_primary: false,
5001 source_kind: DiagnosticSourceKind::Pushed,
5002 ..Diagnostic::default()
5003 }
5004 },
5005 DiagnosticEntry {
5006 range: Point::new(1, 13)..Point::new(1, 15),
5007 diagnostic: Diagnostic {
5008 severity: DiagnosticSeverity::HINT,
5009 message: "error 2 hint 1".to_string(),
5010 group_id: 0,
5011 is_primary: false,
5012 source_kind: DiagnosticSourceKind::Pushed,
5013 ..Diagnostic::default()
5014 }
5015 },
5016 DiagnosticEntry {
5017 range: Point::new(1, 13)..Point::new(1, 15),
5018 diagnostic: Diagnostic {
5019 severity: DiagnosticSeverity::HINT,
5020 message: "error 2 hint 2".to_string(),
5021 group_id: 0,
5022 is_primary: false,
5023 source_kind: DiagnosticSourceKind::Pushed,
5024 ..Diagnostic::default()
5025 }
5026 },
5027 DiagnosticEntry {
5028 range: Point::new(2, 8)..Point::new(2, 17),
5029 diagnostic: Diagnostic {
5030 severity: DiagnosticSeverity::ERROR,
5031 message: "error 2".to_string(),
5032 group_id: 0,
5033 is_primary: true,
5034 source_kind: DiagnosticSourceKind::Pushed,
5035 ..Diagnostic::default()
5036 }
5037 }
5038 ]
5039 );
5040
5041 assert_eq!(
5042 buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
5043 &[
5044 DiagnosticEntry {
5045 range: Point::new(1, 13)..Point::new(1, 15),
5046 diagnostic: Diagnostic {
5047 severity: DiagnosticSeverity::HINT,
5048 message: "error 2 hint 1".to_string(),
5049 group_id: 0,
5050 is_primary: false,
5051 source_kind: DiagnosticSourceKind::Pushed,
5052 ..Diagnostic::default()
5053 }
5054 },
5055 DiagnosticEntry {
5056 range: Point::new(1, 13)..Point::new(1, 15),
5057 diagnostic: Diagnostic {
5058 severity: DiagnosticSeverity::HINT,
5059 message: "error 2 hint 2".to_string(),
5060 group_id: 0,
5061 is_primary: false,
5062 source_kind: DiagnosticSourceKind::Pushed,
5063 ..Diagnostic::default()
5064 }
5065 },
5066 DiagnosticEntry {
5067 range: Point::new(2, 8)..Point::new(2, 17),
5068 diagnostic: Diagnostic {
5069 severity: DiagnosticSeverity::ERROR,
5070 message: "error 2".to_string(),
5071 group_id: 0,
5072 is_primary: true,
5073 source_kind: DiagnosticSourceKind::Pushed,
5074 ..Diagnostic::default()
5075 }
5076 }
5077 ]
5078 );
5079
5080 assert_eq!(
5081 buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
5082 &[
5083 DiagnosticEntry {
5084 range: Point::new(1, 8)..Point::new(1, 9),
5085 diagnostic: Diagnostic {
5086 severity: DiagnosticSeverity::WARNING,
5087 message: "error 1".to_string(),
5088 group_id: 1,
5089 is_primary: true,
5090 source_kind: DiagnosticSourceKind::Pushed,
5091 ..Diagnostic::default()
5092 }
5093 },
5094 DiagnosticEntry {
5095 range: Point::new(1, 8)..Point::new(1, 9),
5096 diagnostic: Diagnostic {
5097 severity: DiagnosticSeverity::HINT,
5098 message: "error 1 hint 1".to_string(),
5099 group_id: 1,
5100 is_primary: false,
5101 source_kind: DiagnosticSourceKind::Pushed,
5102 ..Diagnostic::default()
5103 }
5104 },
5105 ]
5106 );
5107}
5108
// Verifies the LSP file-operation protocol around a project rename: Zed must
// send `workspace/willRenameFiles` before renaming, apply the WorkspaceEdit
// the server returns, and send `workspace/didRenameFiles` afterwards.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // File-operation filters the fake server registers: it wants to hear about
    // renames of any `.rs` file and of any folder.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename. The returned future won't resolve until the server
    // has answered the `willRenameFiles` request handled below.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from `willRenameFiles`; Zed is expected
    // to resolve (apply) it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once by the willRename handler so we can assert it actually ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    // The request must describe the old and new URIs of the
                    // single file being renamed.
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive `didRenameFiles`
    // with the same old/new URI pair.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5244
// Verifies symbol rename via LSP: `textDocument/prepareRename` resolves the
// renameable range, then `textDocument/rename` returns a multi-file
// WorkspaceEdit that Zed applies to both affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server advertises rename support including prepareRename.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the server reports the
    // renameable range as columns 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server's WorkspaceEdit touches both one.rs
    // (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The returned transaction covers both edited buffers. Pull out the
    // buffer we renamed in first, then the remaining (two.rs) buffer.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5384
5385#[gpui::test]
5386async fn test_search(cx: &mut gpui::TestAppContext) {
5387 init_test(cx);
5388
5389 let fs = FakeFs::new(cx.executor());
5390 fs.insert_tree(
5391 path!("/dir"),
5392 json!({
5393 "one.rs": "const ONE: usize = 1;",
5394 "two.rs": "const TWO: usize = one::ONE + one::ONE;",
5395 "three.rs": "const THREE: usize = one::ONE + two::TWO;",
5396 "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
5397 }),
5398 )
5399 .await;
5400 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5401 assert_eq!(
5402 search(
5403 &project,
5404 SearchQuery::text(
5405 "TWO",
5406 false,
5407 true,
5408 false,
5409 Default::default(),
5410 Default::default(),
5411 false,
5412 None
5413 )
5414 .unwrap(),
5415 cx
5416 )
5417 .await
5418 .unwrap(),
5419 HashMap::from_iter([
5420 (path!("dir/two.rs").to_string(), vec![6..9]),
5421 (path!("dir/three.rs").to_string(), vec![37..40])
5422 ])
5423 );
5424
5425 let buffer_4 = project
5426 .update(cx, |project, cx| {
5427 project.open_local_buffer(path!("/dir/four.rs"), cx)
5428 })
5429 .await
5430 .unwrap();
5431 buffer_4.update(cx, |buffer, cx| {
5432 let text = "two::TWO";
5433 buffer.edit([(20..28, text), (31..43, text)], None, cx);
5434 });
5435
5436 assert_eq!(
5437 search(
5438 &project,
5439 SearchQuery::text(
5440 "TWO",
5441 false,
5442 true,
5443 false,
5444 Default::default(),
5445 Default::default(),
5446 false,
5447 None,
5448 )
5449 .unwrap(),
5450 cx
5451 )
5452 .await
5453 .unwrap(),
5454 HashMap::from_iter([
5455 (path!("dir/two.rs").to_string(), vec![6..9]),
5456 (path!("dir/three.rs").to_string(), vec![37..40]),
5457 (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
5458 ])
5459 );
5460}
5461
5462#[gpui::test]
5463async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
5464 init_test(cx);
5465
5466 let search_query = "file";
5467
5468 let fs = FakeFs::new(cx.executor());
5469 fs.insert_tree(
5470 path!("/dir"),
5471 json!({
5472 "one.rs": r#"// Rust file one"#,
5473 "one.ts": r#"// TypeScript file one"#,
5474 "two.rs": r#"// Rust file two"#,
5475 "two.ts": r#"// TypeScript file two"#,
5476 }),
5477 )
5478 .await;
5479 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5480
5481 assert!(
5482 search(
5483 &project,
5484 SearchQuery::text(
5485 search_query,
5486 false,
5487 true,
5488 false,
5489 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5490 Default::default(),
5491 false,
5492 None
5493 )
5494 .unwrap(),
5495 cx
5496 )
5497 .await
5498 .unwrap()
5499 .is_empty(),
5500 "If no inclusions match, no files should be returned"
5501 );
5502
5503 assert_eq!(
5504 search(
5505 &project,
5506 SearchQuery::text(
5507 search_query,
5508 false,
5509 true,
5510 false,
5511 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5512 Default::default(),
5513 false,
5514 None
5515 )
5516 .unwrap(),
5517 cx
5518 )
5519 .await
5520 .unwrap(),
5521 HashMap::from_iter([
5522 (path!("dir/one.rs").to_string(), vec![8..12]),
5523 (path!("dir/two.rs").to_string(), vec![8..12]),
5524 ]),
5525 "Rust only search should give only Rust files"
5526 );
5527
5528 assert_eq!(
5529 search(
5530 &project,
5531 SearchQuery::text(
5532 search_query,
5533 false,
5534 true,
5535 false,
5536 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5537 .unwrap(),
5538 Default::default(),
5539 false,
5540 None,
5541 )
5542 .unwrap(),
5543 cx
5544 )
5545 .await
5546 .unwrap(),
5547 HashMap::from_iter([
5548 (path!("dir/one.ts").to_string(), vec![14..18]),
5549 (path!("dir/two.ts").to_string(), vec![14..18]),
5550 ]),
5551 "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
5552 );
5553
5554 assert_eq!(
5555 search(
5556 &project,
5557 SearchQuery::text(
5558 search_query,
5559 false,
5560 true,
5561 false,
5562 PathMatcher::new(
5563 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5564 PathStyle::local()
5565 )
5566 .unwrap(),
5567 Default::default(),
5568 false,
5569 None,
5570 )
5571 .unwrap(),
5572 cx
5573 )
5574 .await
5575 .unwrap(),
5576 HashMap::from_iter([
5577 (path!("dir/two.ts").to_string(), vec![14..18]),
5578 (path!("dir/one.rs").to_string(), vec![8..12]),
5579 (path!("dir/one.ts").to_string(), vec![14..18]),
5580 (path!("dir/two.rs").to_string(), vec![8..12]),
5581 ]),
5582 "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
5583 );
5584}
5585
5586#[gpui::test]
5587async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5588 init_test(cx);
5589
5590 let search_query = "file";
5591
5592 let fs = FakeFs::new(cx.executor());
5593 fs.insert_tree(
5594 path!("/dir"),
5595 json!({
5596 "one.rs": r#"// Rust file one"#,
5597 "one.ts": r#"// TypeScript file one"#,
5598 "two.rs": r#"// Rust file two"#,
5599 "two.ts": r#"// TypeScript file two"#,
5600 }),
5601 )
5602 .await;
5603 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5604
5605 assert_eq!(
5606 search(
5607 &project,
5608 SearchQuery::text(
5609 search_query,
5610 false,
5611 true,
5612 false,
5613 Default::default(),
5614 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5615 false,
5616 None,
5617 )
5618 .unwrap(),
5619 cx
5620 )
5621 .await
5622 .unwrap(),
5623 HashMap::from_iter([
5624 (path!("dir/one.rs").to_string(), vec![8..12]),
5625 (path!("dir/one.ts").to_string(), vec![14..18]),
5626 (path!("dir/two.rs").to_string(), vec![8..12]),
5627 (path!("dir/two.ts").to_string(), vec![14..18]),
5628 ]),
5629 "If no exclusions match, all files should be returned"
5630 );
5631
5632 assert_eq!(
5633 search(
5634 &project,
5635 SearchQuery::text(
5636 search_query,
5637 false,
5638 true,
5639 false,
5640 Default::default(),
5641 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5642 false,
5643 None,
5644 )
5645 .unwrap(),
5646 cx
5647 )
5648 .await
5649 .unwrap(),
5650 HashMap::from_iter([
5651 (path!("dir/one.ts").to_string(), vec![14..18]),
5652 (path!("dir/two.ts").to_string(), vec![14..18]),
5653 ]),
5654 "Rust exclusion search should give only TypeScript files"
5655 );
5656
5657 assert_eq!(
5658 search(
5659 &project,
5660 SearchQuery::text(
5661 search_query,
5662 false,
5663 true,
5664 false,
5665 Default::default(),
5666 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5667 .unwrap(),
5668 false,
5669 None,
5670 )
5671 .unwrap(),
5672 cx
5673 )
5674 .await
5675 .unwrap(),
5676 HashMap::from_iter([
5677 (path!("dir/one.rs").to_string(), vec![8..12]),
5678 (path!("dir/two.rs").to_string(), vec![8..12]),
5679 ]),
5680 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5681 );
5682
5683 assert!(
5684 search(
5685 &project,
5686 SearchQuery::text(
5687 search_query,
5688 false,
5689 true,
5690 false,
5691 Default::default(),
5692 PathMatcher::new(
5693 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5694 PathStyle::local(),
5695 )
5696 .unwrap(),
5697 false,
5698 None,
5699 )
5700 .unwrap(),
5701 cx
5702 )
5703 .await
5704 .unwrap()
5705 .is_empty(),
5706 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5707 );
5708}
5709
5710#[gpui::test]
5711async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5712 init_test(cx);
5713
5714 let search_query = "file";
5715
5716 let fs = FakeFs::new(cx.executor());
5717 fs.insert_tree(
5718 path!("/dir"),
5719 json!({
5720 "one.rs": r#"// Rust file one"#,
5721 "one.ts": r#"// TypeScript file one"#,
5722 "two.rs": r#"// Rust file two"#,
5723 "two.ts": r#"// TypeScript file two"#,
5724 }),
5725 )
5726 .await;
5727
5728 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5729 let path_style = PathStyle::local();
5730 let _buffer = project.update(cx, |project, cx| {
5731 project.create_local_buffer("file", None, false, cx)
5732 });
5733
5734 assert_eq!(
5735 search(
5736 &project,
5737 SearchQuery::text(
5738 search_query,
5739 false,
5740 true,
5741 false,
5742 Default::default(),
5743 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5744 false,
5745 None,
5746 )
5747 .unwrap(),
5748 cx
5749 )
5750 .await
5751 .unwrap(),
5752 HashMap::from_iter([
5753 (path!("dir/one.rs").to_string(), vec![8..12]),
5754 (path!("dir/one.ts").to_string(), vec![14..18]),
5755 (path!("dir/two.rs").to_string(), vec![8..12]),
5756 (path!("dir/two.ts").to_string(), vec![14..18]),
5757 ]),
5758 "If no exclusions match, all files should be returned"
5759 );
5760
5761 assert_eq!(
5762 search(
5763 &project,
5764 SearchQuery::text(
5765 search_query,
5766 false,
5767 true,
5768 false,
5769 Default::default(),
5770 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5771 false,
5772 None,
5773 )
5774 .unwrap(),
5775 cx
5776 )
5777 .await
5778 .unwrap(),
5779 HashMap::from_iter([
5780 (path!("dir/one.ts").to_string(), vec![14..18]),
5781 (path!("dir/two.ts").to_string(), vec![14..18]),
5782 ]),
5783 "Rust exclusion search should give only TypeScript files"
5784 );
5785
5786 assert_eq!(
5787 search(
5788 &project,
5789 SearchQuery::text(
5790 search_query,
5791 false,
5792 true,
5793 false,
5794 Default::default(),
5795 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5796 false,
5797 None,
5798 )
5799 .unwrap(),
5800 cx
5801 )
5802 .await
5803 .unwrap(),
5804 HashMap::from_iter([
5805 (path!("dir/one.rs").to_string(), vec![8..12]),
5806 (path!("dir/two.rs").to_string(), vec![8..12]),
5807 ]),
5808 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5809 );
5810
5811 assert!(
5812 search(
5813 &project,
5814 SearchQuery::text(
5815 search_query,
5816 false,
5817 true,
5818 false,
5819 Default::default(),
5820 PathMatcher::new(
5821 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5822 PathStyle::local(),
5823 )
5824 .unwrap(),
5825 false,
5826 None,
5827 )
5828 .unwrap(),
5829 cx
5830 )
5831 .await
5832 .unwrap()
5833 .is_empty(),
5834 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5835 );
5836}
5837
5838#[gpui::test]
5839async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5840 init_test(cx);
5841
5842 let search_query = "file";
5843
5844 let fs = FakeFs::new(cx.executor());
5845 fs.insert_tree(
5846 path!("/dir"),
5847 json!({
5848 "one.rs": r#"// Rust file one"#,
5849 "one.ts": r#"// TypeScript file one"#,
5850 "two.rs": r#"// Rust file two"#,
5851 "two.ts": r#"// TypeScript file two"#,
5852 }),
5853 )
5854 .await;
5855 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5856 assert!(
5857 search(
5858 &project,
5859 SearchQuery::text(
5860 search_query,
5861 false,
5862 true,
5863 false,
5864 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5865 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5866 false,
5867 None,
5868 )
5869 .unwrap(),
5870 cx
5871 )
5872 .await
5873 .unwrap()
5874 .is_empty(),
5875 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5876 );
5877
5878 assert!(
5879 search(
5880 &project,
5881 SearchQuery::text(
5882 search_query,
5883 false,
5884 true,
5885 false,
5886 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5887 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5888 false,
5889 None,
5890 )
5891 .unwrap(),
5892 cx
5893 )
5894 .await
5895 .unwrap()
5896 .is_empty(),
5897 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5898 );
5899
5900 assert!(
5901 search(
5902 &project,
5903 SearchQuery::text(
5904 search_query,
5905 false,
5906 true,
5907 false,
5908 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5909 .unwrap(),
5910 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5911 .unwrap(),
5912 false,
5913 None,
5914 )
5915 .unwrap(),
5916 cx
5917 )
5918 .await
5919 .unwrap()
5920 .is_empty(),
5921 "Non-matching inclusions and exclusions should not change that."
5922 );
5923
5924 assert_eq!(
5925 search(
5926 &project,
5927 SearchQuery::text(
5928 search_query,
5929 false,
5930 true,
5931 false,
5932 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5933 .unwrap(),
5934 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
5935 .unwrap(),
5936 false,
5937 None,
5938 )
5939 .unwrap(),
5940 cx
5941 )
5942 .await
5943 .unwrap(),
5944 HashMap::from_iter([
5945 (path!("dir/one.ts").to_string(), vec![14..18]),
5946 (path!("dir/two.ts").to_string(), vec![14..18]),
5947 ]),
5948 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5949 );
5950}
5951
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Two worktrees with identical contents, so inclusion patterns are the only
    // thing distinguishing which results come back.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // An inclusion pattern prefixed with a worktree name should restrict the
    // search to that single worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Same pattern shape, other worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // A pattern without a worktree prefix matches across all worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6050
6051#[gpui::test]
6052async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6053 init_test(cx);
6054
6055 let fs = FakeFs::new(cx.background_executor.clone());
6056 fs.insert_tree(
6057 path!("/dir"),
6058 json!({
6059 ".git": {},
6060 ".gitignore": "**/target\n/node_modules\n",
6061 "target": {
6062 "index.txt": "index_key:index_value"
6063 },
6064 "node_modules": {
6065 "eslint": {
6066 "index.ts": "const eslint_key = 'eslint value'",
6067 "package.json": r#"{ "some_key": "some value" }"#,
6068 },
6069 "prettier": {
6070 "index.ts": "const prettier_key = 'prettier value'",
6071 "package.json": r#"{ "other_key": "other value" }"#,
6072 },
6073 },
6074 "package.json": r#"{ "main_key": "main value" }"#,
6075 }),
6076 )
6077 .await;
6078 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6079
6080 let query = "key";
6081 assert_eq!(
6082 search(
6083 &project,
6084 SearchQuery::text(
6085 query,
6086 false,
6087 false,
6088 false,
6089 Default::default(),
6090 Default::default(),
6091 false,
6092 None,
6093 )
6094 .unwrap(),
6095 cx
6096 )
6097 .await
6098 .unwrap(),
6099 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6100 "Only one non-ignored file should have the query"
6101 );
6102
6103 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6104 let path_style = PathStyle::local();
6105 assert_eq!(
6106 search(
6107 &project,
6108 SearchQuery::text(
6109 query,
6110 false,
6111 false,
6112 true,
6113 Default::default(),
6114 Default::default(),
6115 false,
6116 None,
6117 )
6118 .unwrap(),
6119 cx
6120 )
6121 .await
6122 .unwrap(),
6123 HashMap::from_iter([
6124 (path!("dir/package.json").to_string(), vec![8..11]),
6125 (path!("dir/target/index.txt").to_string(), vec![6..9]),
6126 (
6127 path!("dir/node_modules/prettier/package.json").to_string(),
6128 vec![9..12]
6129 ),
6130 (
6131 path!("dir/node_modules/prettier/index.ts").to_string(),
6132 vec![15..18]
6133 ),
6134 (
6135 path!("dir/node_modules/eslint/index.ts").to_string(),
6136 vec![13..16]
6137 ),
6138 (
6139 path!("dir/node_modules/eslint/package.json").to_string(),
6140 vec![8..11]
6141 ),
6142 ]),
6143 "Unrestricted search with ignored directories should find every file with the query"
6144 );
6145
6146 let files_to_include =
6147 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
6148 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
6149 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6150 assert_eq!(
6151 search(
6152 &project,
6153 SearchQuery::text(
6154 query,
6155 false,
6156 false,
6157 true,
6158 files_to_include,
6159 files_to_exclude,
6160 false,
6161 None,
6162 )
6163 .unwrap(),
6164 cx
6165 )
6166 .await
6167 .unwrap(),
6168 HashMap::from_iter([(
6169 path!("dir/node_modules/prettier/package.json").to_string(),
6170 vec![9..12]
6171 )]),
6172 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6173 );
6174}
6175
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-sensitive non-ASCII search stays a plain text query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Expected ranges are byte offsets: each Cyrillic letter is 2 bytes in
    // UTF-8, so "привет" spans 12 bytes (e.g. 3..15 after the 3-byte "// ").
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive non-ASCII search is implemented as a regex query.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    // Now both the upper-case and lower-case occurrences match.
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing "." is treated literally (only two.rs ends with "ПРИВЕТ."),
    // so the regex fallback must escape the query text.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6258
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree is rooted at /one/two/three, a subdirectory of the tree
    // inserted above.
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            // Create an entry whose name ends in dots ("b..") — a tricky name
            // for path normalization. NOTE(review): the `true` flag presumably
            // marks the entry as a directory — confirm against create_entry's
            // signature.
            project.create_entry((id, rel_path("b..")), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // The new "b.." entry must exist on disk alongside the original tree,
    // with nothing else created or removed.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );
}
6301
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    // Four servers for the same language: three advertise hover support, one
    // does not — the latter must never receive a hover request.
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Install hover handlers per server BEFORE issuing the hover request:
    // two servers return content, one returns None, and the capability-less
    // server panics if it is (incorrectly) queried at all.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until every capable server has actually been asked for a hover.
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // The ESLint server's `None` response and the capability-less server
    // contribute nothing; only the two content-bearing responses remain.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6456
6457#[gpui::test]
6458async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6459 init_test(cx);
6460
6461 let fs = FakeFs::new(cx.executor());
6462 fs.insert_tree(
6463 path!("/dir"),
6464 json!({
6465 "a.ts": "a",
6466 }),
6467 )
6468 .await;
6469
6470 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6471
6472 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6473 language_registry.add(typescript_lang());
6474 let mut fake_language_servers = language_registry.register_fake_lsp(
6475 "TypeScript",
6476 FakeLspAdapter {
6477 capabilities: lsp::ServerCapabilities {
6478 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6479 ..lsp::ServerCapabilities::default()
6480 },
6481 ..FakeLspAdapter::default()
6482 },
6483 );
6484
6485 let (buffer, _handle) = project
6486 .update(cx, |p, cx| {
6487 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6488 })
6489 .await
6490 .unwrap();
6491 cx.executor().run_until_parked();
6492
6493 let fake_server = fake_language_servers
6494 .next()
6495 .await
6496 .expect("failed to get the language server");
6497
6498 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6499 move |_, _| async move {
6500 Ok(Some(lsp::Hover {
6501 contents: lsp::HoverContents::Array(vec![
6502 lsp::MarkedString::String("".to_string()),
6503 lsp::MarkedString::String(" ".to_string()),
6504 lsp::MarkedString::String("\n\n\n".to_string()),
6505 ]),
6506 range: None,
6507 }))
6508 },
6509 );
6510
6511 let hover_task = project.update(cx, |project, cx| {
6512 project.hover(&buffer, Point::new(0, 0), cx)
6513 });
6514 let () = request_handled
6515 .next()
6516 .await
6517 .expect("All hover requests should have been triggered");
6518 assert_eq!(
6519 Vec::<String>::new(),
6520 hover_task
6521 .await
6522 .into_iter()
6523 .flatten()
6524 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6525 .sorted()
6526 .collect::<Vec<_>>(),
6527 "Empty hover parts should be ignored"
6528 );
6529}
6530
6531#[gpui::test]
6532async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6533 init_test(cx);
6534
6535 let fs = FakeFs::new(cx.executor());
6536 fs.insert_tree(
6537 path!("/dir"),
6538 json!({
6539 "a.ts": "a",
6540 }),
6541 )
6542 .await;
6543
6544 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6545
6546 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6547 language_registry.add(typescript_lang());
6548 let mut fake_language_servers = language_registry.register_fake_lsp(
6549 "TypeScript",
6550 FakeLspAdapter {
6551 capabilities: lsp::ServerCapabilities {
6552 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6553 ..lsp::ServerCapabilities::default()
6554 },
6555 ..FakeLspAdapter::default()
6556 },
6557 );
6558
6559 let (buffer, _handle) = project
6560 .update(cx, |p, cx| {
6561 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6562 })
6563 .await
6564 .unwrap();
6565 cx.executor().run_until_parked();
6566
6567 let fake_server = fake_language_servers
6568 .next()
6569 .await
6570 .expect("failed to get the language server");
6571
6572 let mut request_handled = fake_server
6573 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6574 Ok(Some(vec![
6575 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6576 title: "organize imports".to_string(),
6577 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6578 ..lsp::CodeAction::default()
6579 }),
6580 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6581 title: "fix code".to_string(),
6582 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6583 ..lsp::CodeAction::default()
6584 }),
6585 ]))
6586 });
6587
6588 let code_actions_task = project.update(cx, |project, cx| {
6589 project.code_actions(
6590 &buffer,
6591 0..buffer.read(cx).len(),
6592 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6593 cx,
6594 )
6595 });
6596
6597 let () = request_handled
6598 .next()
6599 .await
6600 .expect("The code action request should have been triggered");
6601
6602 let code_actions = code_actions_task.await.unwrap().unwrap();
6603 assert_eq!(code_actions.len(), 1);
6604 assert_eq!(
6605 code_actions[0].lsp_action.action_kind(),
6606 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6607 );
6608}
6609
6610#[gpui::test]
6611async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6612 init_test(cx);
6613
6614 let fs = FakeFs::new(cx.executor());
6615 fs.insert_tree(
6616 path!("/dir"),
6617 json!({
6618 "a.tsx": "a",
6619 }),
6620 )
6621 .await;
6622
6623 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6624
6625 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6626 language_registry.add(tsx_lang());
6627 let language_server_names = [
6628 "TypeScriptServer",
6629 "TailwindServer",
6630 "ESLintServer",
6631 "NoActionsCapabilitiesServer",
6632 ];
6633
6634 let mut language_server_rxs = [
6635 language_registry.register_fake_lsp(
6636 "tsx",
6637 FakeLspAdapter {
6638 name: language_server_names[0],
6639 capabilities: lsp::ServerCapabilities {
6640 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6641 ..lsp::ServerCapabilities::default()
6642 },
6643 ..FakeLspAdapter::default()
6644 },
6645 ),
6646 language_registry.register_fake_lsp(
6647 "tsx",
6648 FakeLspAdapter {
6649 name: language_server_names[1],
6650 capabilities: lsp::ServerCapabilities {
6651 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6652 ..lsp::ServerCapabilities::default()
6653 },
6654 ..FakeLspAdapter::default()
6655 },
6656 ),
6657 language_registry.register_fake_lsp(
6658 "tsx",
6659 FakeLspAdapter {
6660 name: language_server_names[2],
6661 capabilities: lsp::ServerCapabilities {
6662 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6663 ..lsp::ServerCapabilities::default()
6664 },
6665 ..FakeLspAdapter::default()
6666 },
6667 ),
6668 language_registry.register_fake_lsp(
6669 "tsx",
6670 FakeLspAdapter {
6671 name: language_server_names[3],
6672 capabilities: lsp::ServerCapabilities {
6673 code_action_provider: None,
6674 ..lsp::ServerCapabilities::default()
6675 },
6676 ..FakeLspAdapter::default()
6677 },
6678 ),
6679 ];
6680
6681 let (buffer, _handle) = project
6682 .update(cx, |p, cx| {
6683 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6684 })
6685 .await
6686 .unwrap();
6687 cx.executor().run_until_parked();
6688
6689 let mut servers_with_actions_requests = HashMap::default();
6690 for i in 0..language_server_names.len() {
6691 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6692 panic!(
6693 "Failed to get language server #{i} with name {}",
6694 &language_server_names[i]
6695 )
6696 });
6697 let new_server_name = new_server.server.name();
6698
6699 assert!(
6700 !servers_with_actions_requests.contains_key(&new_server_name),
6701 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6702 );
6703 match new_server_name.0.as_ref() {
6704 "TailwindServer" | "TypeScriptServer" => {
6705 servers_with_actions_requests.insert(
6706 new_server_name.clone(),
6707 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6708 move |_, _| {
6709 let name = new_server_name.clone();
6710 async move {
6711 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6712 lsp::CodeAction {
6713 title: format!("{name} code action"),
6714 ..lsp::CodeAction::default()
6715 },
6716 )]))
6717 }
6718 },
6719 ),
6720 );
6721 }
6722 "ESLintServer" => {
6723 servers_with_actions_requests.insert(
6724 new_server_name,
6725 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6726 |_, _| async move { Ok(None) },
6727 ),
6728 );
6729 }
6730 "NoActionsCapabilitiesServer" => {
6731 let _never_handled = new_server
6732 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6733 panic!(
6734 "Should not call for code actions server with no corresponding capabilities"
6735 )
6736 });
6737 }
6738 unexpected => panic!("Unexpected server name: {unexpected}"),
6739 }
6740 }
6741
6742 let code_actions_task = project.update(cx, |project, cx| {
6743 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6744 });
6745
6746 // cx.run_until_parked();
6747 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6748 |mut code_actions_request| async move {
6749 code_actions_request
6750 .next()
6751 .await
6752 .expect("All code actions requests should have been triggered")
6753 },
6754 ))
6755 .await;
6756 assert_eq!(
6757 vec!["TailwindServer code action", "TypeScriptServer code action"],
6758 code_actions_task
6759 .await
6760 .unwrap()
6761 .unwrap()
6762 .into_iter()
6763 .map(|code_action| code_action.lsp_action.title().to_owned())
6764 .sorted()
6765 .collect::<Vec<_>>(),
6766 "Should receive code actions responses from all related servers with hover capabilities"
6767 );
6768}
6769
6770#[gpui::test]
6771async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6772 init_test(cx);
6773
6774 let fs = FakeFs::new(cx.executor());
6775 fs.insert_tree(
6776 "/dir",
6777 json!({
6778 "a.rs": "let a = 1;",
6779 "b.rs": "let b = 2;",
6780 "c.rs": "let c = 2;",
6781 }),
6782 )
6783 .await;
6784
6785 let project = Project::test(
6786 fs,
6787 [
6788 "/dir/a.rs".as_ref(),
6789 "/dir/b.rs".as_ref(),
6790 "/dir/c.rs".as_ref(),
6791 ],
6792 cx,
6793 )
6794 .await;
6795
6796 // check the initial state and get the worktrees
6797 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6798 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6799 assert_eq!(worktrees.len(), 3);
6800
6801 let worktree_a = worktrees[0].read(cx);
6802 let worktree_b = worktrees[1].read(cx);
6803 let worktree_c = worktrees[2].read(cx);
6804
6805 // check they start in the right order
6806 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6807 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6808 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6809
6810 (
6811 worktrees[0].clone(),
6812 worktrees[1].clone(),
6813 worktrees[2].clone(),
6814 )
6815 });
6816
6817 // move first worktree to after the second
6818 // [a, b, c] -> [b, a, c]
6819 project
6820 .update(cx, |project, cx| {
6821 let first = worktree_a.read(cx);
6822 let second = worktree_b.read(cx);
6823 project.move_worktree(first.id(), second.id(), cx)
6824 })
6825 .expect("moving first after second");
6826
6827 // check the state after moving
6828 project.update(cx, |project, cx| {
6829 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6830 assert_eq!(worktrees.len(), 3);
6831
6832 let first = worktrees[0].read(cx);
6833 let second = worktrees[1].read(cx);
6834 let third = worktrees[2].read(cx);
6835
6836 // check they are now in the right order
6837 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6838 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6839 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6840 });
6841
6842 // move the second worktree to before the first
6843 // [b, a, c] -> [a, b, c]
6844 project
6845 .update(cx, |project, cx| {
6846 let second = worktree_a.read(cx);
6847 let first = worktree_b.read(cx);
6848 project.move_worktree(first.id(), second.id(), cx)
6849 })
6850 .expect("moving second before first");
6851
6852 // check the state after moving
6853 project.update(cx, |project, cx| {
6854 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6855 assert_eq!(worktrees.len(), 3);
6856
6857 let first = worktrees[0].read(cx);
6858 let second = worktrees[1].read(cx);
6859 let third = worktrees[2].read(cx);
6860
6861 // check they are now in the right order
6862 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6863 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6864 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6865 });
6866
6867 // move the second worktree to after the third
6868 // [a, b, c] -> [a, c, b]
6869 project
6870 .update(cx, |project, cx| {
6871 let second = worktree_b.read(cx);
6872 let third = worktree_c.read(cx);
6873 project.move_worktree(second.id(), third.id(), cx)
6874 })
6875 .expect("moving second after third");
6876
6877 // check the state after moving
6878 project.update(cx, |project, cx| {
6879 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6880 assert_eq!(worktrees.len(), 3);
6881
6882 let first = worktrees[0].read(cx);
6883 let second = worktrees[1].read(cx);
6884 let third = worktrees[2].read(cx);
6885
6886 // check they are now in the right order
6887 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6888 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6889 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6890 });
6891
6892 // move the third worktree to before the second
6893 // [a, c, b] -> [a, b, c]
6894 project
6895 .update(cx, |project, cx| {
6896 let third = worktree_c.read(cx);
6897 let second = worktree_b.read(cx);
6898 project.move_worktree(third.id(), second.id(), cx)
6899 })
6900 .expect("moving third before second");
6901
6902 // check the state after moving
6903 project.update(cx, |project, cx| {
6904 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6905 assert_eq!(worktrees.len(), 3);
6906
6907 let first = worktrees[0].read(cx);
6908 let second = worktrees[1].read(cx);
6909 let third = worktrees[2].read(cx);
6910
6911 // check they are now in the right order
6912 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6913 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6914 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6915 });
6916
6917 // move the first worktree to after the third
6918 // [a, b, c] -> [b, c, a]
6919 project
6920 .update(cx, |project, cx| {
6921 let first = worktree_a.read(cx);
6922 let third = worktree_c.read(cx);
6923 project.move_worktree(first.id(), third.id(), cx)
6924 })
6925 .expect("moving first after third");
6926
6927 // check the state after moving
6928 project.update(cx, |project, cx| {
6929 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6930 assert_eq!(worktrees.len(), 3);
6931
6932 let first = worktrees[0].read(cx);
6933 let second = worktrees[1].read(cx);
6934 let third = worktrees[2].read(cx);
6935
6936 // check they are now in the right order
6937 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6938 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6939 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6940 });
6941
6942 // move the third worktree to before the first
6943 // [b, c, a] -> [a, b, c]
6944 project
6945 .update(cx, |project, cx| {
6946 let third = worktree_a.read(cx);
6947 let first = worktree_b.read(cx);
6948 project.move_worktree(third.id(), first.id(), cx)
6949 })
6950 .expect("moving third before first");
6951
6952 // check the state after moving
6953 project.update(cx, |project, cx| {
6954 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6955 assert_eq!(worktrees.len(), 3);
6956
6957 let first = worktrees[0].read(cx);
6958 let second = worktrees[1].read(cx);
6959 let third = worktrees[2].read(cx);
6960
6961 // check they are now in the right order
6962 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6963 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6964 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6965 });
6966}
6967
6968#[gpui::test]
6969async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
6970 init_test(cx);
6971
6972 let staged_contents = r#"
6973 fn main() {
6974 println!("hello world");
6975 }
6976 "#
6977 .unindent();
6978 let file_contents = r#"
6979 // print goodbye
6980 fn main() {
6981 println!("goodbye world");
6982 }
6983 "#
6984 .unindent();
6985
6986 let fs = FakeFs::new(cx.background_executor.clone());
6987 fs.insert_tree(
6988 "/dir",
6989 json!({
6990 ".git": {},
6991 "src": {
6992 "main.rs": file_contents,
6993 }
6994 }),
6995 )
6996 .await;
6997
6998 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
6999
7000 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7001
7002 let buffer = project
7003 .update(cx, |project, cx| {
7004 project.open_local_buffer("/dir/src/main.rs", cx)
7005 })
7006 .await
7007 .unwrap();
7008 let unstaged_diff = project
7009 .update(cx, |project, cx| {
7010 project.open_unstaged_diff(buffer.clone(), cx)
7011 })
7012 .await
7013 .unwrap();
7014
7015 cx.run_until_parked();
7016 unstaged_diff.update(cx, |unstaged_diff, cx| {
7017 let snapshot = buffer.read(cx).snapshot();
7018 assert_hunks(
7019 unstaged_diff.hunks(&snapshot, cx),
7020 &snapshot,
7021 &unstaged_diff.base_text_string().unwrap(),
7022 &[
7023 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7024 (
7025 2..3,
7026 " println!(\"hello world\");\n",
7027 " println!(\"goodbye world\");\n",
7028 DiffHunkStatus::modified_none(),
7029 ),
7030 ],
7031 );
7032 });
7033
7034 let staged_contents = r#"
7035 // print goodbye
7036 fn main() {
7037 }
7038 "#
7039 .unindent();
7040
7041 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7042
7043 cx.run_until_parked();
7044 unstaged_diff.update(cx, |unstaged_diff, cx| {
7045 let snapshot = buffer.read(cx).snapshot();
7046 assert_hunks(
7047 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7048 &snapshot,
7049 &unstaged_diff.base_text().text(),
7050 &[(
7051 2..3,
7052 "",
7053 " println!(\"goodbye world\");\n",
7054 DiffHunkStatus::added_none(),
7055 )],
7056 );
7057 });
7058}
7059
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Exercises the "uncommitted" diff (working copy vs. HEAD): hunk
    // secondary statuses should reflect whether the change also exists in the
    // index, and a file present in HEAD but missing on disk should surface as
    // a single deletion hunk.
    // NOTE(review): this test uses bare "/dir" paths while sibling tests use
    // the `path!` macro — confirm whether it is expected to run on Windows.
    init_test(cx);

    // Three versions of `modification.rs`: HEAD, index ("staged"), and the
    // working copy on disk. The working copy adds a comment (unstaged) on top
    // of the staged println change.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // HEAD and the index also contain `deletion.rs`, which is absent from the
    // working tree — an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD version) should be parsed with the
    // buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment is not in the index (HasSecondaryHunk); the println
    // change is already staged (no secondary hunk).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is still present in the index, so the hunk has a
    // secondary (unstaged) component.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the deletion is staged, the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7239
7240#[gpui::test]
7241async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
7242 use DiffHunkSecondaryStatus::*;
7243 init_test(cx);
7244
7245 let committed_contents = r#"
7246 zero
7247 one
7248 two
7249 three
7250 four
7251 five
7252 "#
7253 .unindent();
7254 let file_contents = r#"
7255 one
7256 TWO
7257 three
7258 FOUR
7259 five
7260 "#
7261 .unindent();
7262
7263 let fs = FakeFs::new(cx.background_executor.clone());
7264 fs.insert_tree(
7265 "/dir",
7266 json!({
7267 ".git": {},
7268 "file.txt": file_contents.clone()
7269 }),
7270 )
7271 .await;
7272
7273 fs.set_head_and_index_for_repo(
7274 path!("/dir/.git").as_ref(),
7275 &[("file.txt", committed_contents.clone())],
7276 );
7277
7278 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7279
7280 let buffer = project
7281 .update(cx, |project, cx| {
7282 project.open_local_buffer("/dir/file.txt", cx)
7283 })
7284 .await
7285 .unwrap();
7286 let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
7287 let uncommitted_diff = project
7288 .update(cx, |project, cx| {
7289 project.open_uncommitted_diff(buffer.clone(), cx)
7290 })
7291 .await
7292 .unwrap();
7293 let mut diff_events = cx.events(&uncommitted_diff);
7294
7295 // The hunks are initially unstaged.
7296 uncommitted_diff.read_with(cx, |diff, cx| {
7297 assert_hunks(
7298 diff.hunks(&snapshot, cx),
7299 &snapshot,
7300 &diff.base_text_string().unwrap(),
7301 &[
7302 (
7303 0..0,
7304 "zero\n",
7305 "",
7306 DiffHunkStatus::deleted(HasSecondaryHunk),
7307 ),
7308 (
7309 1..2,
7310 "two\n",
7311 "TWO\n",
7312 DiffHunkStatus::modified(HasSecondaryHunk),
7313 ),
7314 (
7315 3..4,
7316 "four\n",
7317 "FOUR\n",
7318 DiffHunkStatus::modified(HasSecondaryHunk),
7319 ),
7320 ],
7321 );
7322 });
7323
7324 // Stage a hunk. It appears as optimistically staged.
7325 uncommitted_diff.update(cx, |diff, cx| {
7326 let range =
7327 snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
7328 let hunks = diff
7329 .hunks_intersecting_range(range, &snapshot, cx)
7330 .collect::<Vec<_>>();
7331 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7332
7333 assert_hunks(
7334 diff.hunks(&snapshot, cx),
7335 &snapshot,
7336 &diff.base_text_string().unwrap(),
7337 &[
7338 (
7339 0..0,
7340 "zero\n",
7341 "",
7342 DiffHunkStatus::deleted(HasSecondaryHunk),
7343 ),
7344 (
7345 1..2,
7346 "two\n",
7347 "TWO\n",
7348 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7349 ),
7350 (
7351 3..4,
7352 "four\n",
7353 "FOUR\n",
7354 DiffHunkStatus::modified(HasSecondaryHunk),
7355 ),
7356 ],
7357 );
7358 });
7359
7360 // The diff emits a change event for the range of the staged hunk.
7361 assert!(matches!(
7362 diff_events.next().await.unwrap(),
7363 BufferDiffEvent::HunksStagedOrUnstaged(_)
7364 ));
7365 let event = diff_events.next().await.unwrap();
7366 if let BufferDiffEvent::DiffChanged {
7367 changed_range: Some(changed_range),
7368 } = event
7369 {
7370 let changed_range = changed_range.to_point(&snapshot);
7371 assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
7372 } else {
7373 panic!("Unexpected event {event:?}");
7374 }
7375
7376 // When the write to the index completes, it appears as staged.
7377 cx.run_until_parked();
7378 uncommitted_diff.update(cx, |diff, cx| {
7379 assert_hunks(
7380 diff.hunks(&snapshot, cx),
7381 &snapshot,
7382 &diff.base_text_string().unwrap(),
7383 &[
7384 (
7385 0..0,
7386 "zero\n",
7387 "",
7388 DiffHunkStatus::deleted(HasSecondaryHunk),
7389 ),
7390 (
7391 1..2,
7392 "two\n",
7393 "TWO\n",
7394 DiffHunkStatus::modified(NoSecondaryHunk),
7395 ),
7396 (
7397 3..4,
7398 "four\n",
7399 "FOUR\n",
7400 DiffHunkStatus::modified(HasSecondaryHunk),
7401 ),
7402 ],
7403 );
7404 });
7405
7406 // The diff emits a change event for the changed index text.
7407 let event = diff_events.next().await.unwrap();
7408 if let BufferDiffEvent::DiffChanged {
7409 changed_range: Some(changed_range),
7410 } = event
7411 {
7412 let changed_range = changed_range.to_point(&snapshot);
7413 assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
7414 } else {
7415 panic!("Unexpected event {event:?}");
7416 }
7417
7418 // Simulate a problem writing to the git index.
7419 fs.set_error_message_for_index_write(
7420 "/dir/.git".as_ref(),
7421 Some("failed to write git index".into()),
7422 );
7423
7424 // Stage another hunk.
7425 uncommitted_diff.update(cx, |diff, cx| {
7426 let range =
7427 snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
7428 let hunks = diff
7429 .hunks_intersecting_range(range, &snapshot, cx)
7430 .collect::<Vec<_>>();
7431 diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);
7432
7433 assert_hunks(
7434 diff.hunks(&snapshot, cx),
7435 &snapshot,
7436 &diff.base_text_string().unwrap(),
7437 &[
7438 (
7439 0..0,
7440 "zero\n",
7441 "",
7442 DiffHunkStatus::deleted(HasSecondaryHunk),
7443 ),
7444 (
7445 1..2,
7446 "two\n",
7447 "TWO\n",
7448 DiffHunkStatus::modified(NoSecondaryHunk),
7449 ),
7450 (
7451 3..4,
7452 "four\n",
7453 "FOUR\n",
7454 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7455 ),
7456 ],
7457 );
7458 });
7459 assert!(matches!(
7460 diff_events.next().await.unwrap(),
7461 BufferDiffEvent::HunksStagedOrUnstaged(_)
7462 ));
7463 let event = diff_events.next().await.unwrap();
7464 if let BufferDiffEvent::DiffChanged {
7465 changed_range: Some(changed_range),
7466 } = event
7467 {
7468 let changed_range = changed_range.to_point(&snapshot);
7469 assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
7470 } else {
7471 panic!("Unexpected event {event:?}");
7472 }
7473
7474 // When the write fails, the hunk returns to being unstaged.
7475 cx.run_until_parked();
7476 uncommitted_diff.update(cx, |diff, cx| {
7477 assert_hunks(
7478 diff.hunks(&snapshot, cx),
7479 &snapshot,
7480 &diff.base_text_string().unwrap(),
7481 &[
7482 (
7483 0..0,
7484 "zero\n",
7485 "",
7486 DiffHunkStatus::deleted(HasSecondaryHunk),
7487 ),
7488 (
7489 1..2,
7490 "two\n",
7491 "TWO\n",
7492 DiffHunkStatus::modified(NoSecondaryHunk),
7493 ),
7494 (
7495 3..4,
7496 "four\n",
7497 "FOUR\n",
7498 DiffHunkStatus::modified(HasSecondaryHunk),
7499 ),
7500 ],
7501 );
7502 });
7503
7504 let event = diff_events.next().await.unwrap();
7505 if let BufferDiffEvent::DiffChanged {
7506 changed_range: Some(changed_range),
7507 } = event
7508 {
7509 let changed_range = changed_range.to_point(&snapshot);
7510 assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
7511 } else {
7512 panic!("Unexpected event {event:?}");
7513 }
7514
7515 // Allow writing to the git index to succeed again.
7516 fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
7517
7518 // Stage two hunks with separate operations.
7519 uncommitted_diff.update(cx, |diff, cx| {
7520 let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
7521 diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
7522 diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
7523 });
7524
7525 // Both staged hunks appear as pending.
7526 uncommitted_diff.update(cx, |diff, cx| {
7527 assert_hunks(
7528 diff.hunks(&snapshot, cx),
7529 &snapshot,
7530 &diff.base_text_string().unwrap(),
7531 &[
7532 (
7533 0..0,
7534 "zero\n",
7535 "",
7536 DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
7537 ),
7538 (
7539 1..2,
7540 "two\n",
7541 "TWO\n",
7542 DiffHunkStatus::modified(NoSecondaryHunk),
7543 ),
7544 (
7545 3..4,
7546 "four\n",
7547 "FOUR\n",
7548 DiffHunkStatus::modified(SecondaryHunkRemovalPending),
7549 ),
7550 ],
7551 );
7552 });
7553
7554 // Both staging operations take effect.
7555 cx.run_until_parked();
7556 uncommitted_diff.update(cx, |diff, cx| {
7557 assert_hunks(
7558 diff.hunks(&snapshot, cx),
7559 &snapshot,
7560 &diff.base_text_string().unwrap(),
7561 &[
7562 (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
7563 (
7564 1..2,
7565 "two\n",
7566 "TWO\n",
7567 DiffHunkStatus::modified(NoSecondaryHunk),
7568 ),
7569 (
7570 3..4,
7571 "four\n",
7572 "FOUR\n",
7573 DiffHunkStatus::modified(NoSecondaryHunk),
7574 ),
7575 ],
7576 );
7577 });
7578}
7579
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Stages hunks while file-system events are paused, so that index writes
    // and their corresponding FS notifications arrive out of step. The
    // optimistic pending states must still converge to "staged" once all
    // events are flushed.
    // NOTE(review): the `seeds(...)` attribute pins specific RNG seeds —
    // presumably ones that reproduced a past failure; confirm before changing.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7773
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized test: repeatedly stage/unstage random hunks with random
    // yields in between, then verify every hunk settles into the secondary
    // status implied by the last operation applied to it.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of expected state: each operation below
    // records the pending status it expects on the local copy.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // After quiescing, each pending status should have resolved.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7896
7897#[gpui::test]
7898async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7899 init_test(cx);
7900
7901 let committed_contents = r#"
7902 fn main() {
7903 println!("hello from HEAD");
7904 }
7905 "#
7906 .unindent();
7907 let file_contents = r#"
7908 fn main() {
7909 println!("hello from the working copy");
7910 }
7911 "#
7912 .unindent();
7913
7914 let fs = FakeFs::new(cx.background_executor.clone());
7915 fs.insert_tree(
7916 "/dir",
7917 json!({
7918 ".git": {},
7919 "src": {
7920 "main.rs": file_contents,
7921 }
7922 }),
7923 )
7924 .await;
7925
7926 fs.set_head_for_repo(
7927 Path::new("/dir/.git"),
7928 &[("src/main.rs", committed_contents.clone())],
7929 "deadbeef",
7930 );
7931 fs.set_index_for_repo(
7932 Path::new("/dir/.git"),
7933 &[("src/main.rs", committed_contents.clone())],
7934 );
7935
7936 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7937
7938 let buffer = project
7939 .update(cx, |project, cx| {
7940 project.open_local_buffer("/dir/src/main.rs", cx)
7941 })
7942 .await
7943 .unwrap();
7944 let uncommitted_diff = project
7945 .update(cx, |project, cx| {
7946 project.open_uncommitted_diff(buffer.clone(), cx)
7947 })
7948 .await
7949 .unwrap();
7950
7951 cx.run_until_parked();
7952 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7953 let snapshot = buffer.read(cx).snapshot();
7954 assert_hunks(
7955 uncommitted_diff.hunks(&snapshot, cx),
7956 &snapshot,
7957 &uncommitted_diff.base_text_string().unwrap(),
7958 &[(
7959 1..2,
7960 " println!(\"hello from HEAD\");\n",
7961 " println!(\"hello from the working copy\");\n",
7962 DiffHunkStatus {
7963 kind: DiffHunkStatusKind::Modified,
7964 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7965 },
7966 )],
7967 );
7968 });
7969}
7970
7971#[gpui::test]
7972async fn test_repository_and_path_for_project_path(
7973 background_executor: BackgroundExecutor,
7974 cx: &mut gpui::TestAppContext,
7975) {
7976 init_test(cx);
7977 let fs = FakeFs::new(background_executor);
7978 fs.insert_tree(
7979 path!("/root"),
7980 json!({
7981 "c.txt": "",
7982 "dir1": {
7983 ".git": {},
7984 "deps": {
7985 "dep1": {
7986 ".git": {},
7987 "src": {
7988 "a.txt": ""
7989 }
7990 }
7991 },
7992 "src": {
7993 "b.txt": ""
7994 }
7995 },
7996 }),
7997 )
7998 .await;
7999
8000 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8001 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8002 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8003 project
8004 .update(cx, |project, cx| project.git_scans_complete(cx))
8005 .await;
8006 cx.run_until_parked();
8007
8008 project.read_with(cx, |project, cx| {
8009 let git_store = project.git_store().read(cx);
8010 let pairs = [
8011 ("c.txt", None),
8012 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
8013 (
8014 "dir1/deps/dep1/src/a.txt",
8015 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
8016 ),
8017 ];
8018 let expected = pairs
8019 .iter()
8020 .map(|(path, result)| {
8021 (
8022 path,
8023 result.map(|(repo, repo_path)| {
8024 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
8025 }),
8026 )
8027 })
8028 .collect::<Vec<_>>();
8029 let actual = pairs
8030 .iter()
8031 .map(|(path, _)| {
8032 let project_path = (tree_id, rel_path(path)).into();
8033 let result = maybe!({
8034 let (repo, repo_path) =
8035 git_store.repository_and_path_for_project_path(&project_path, cx)?;
8036 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
8037 });
8038 (path, result)
8039 })
8040 .collect::<Vec<_>>();
8041 pretty_assertions::assert_eq!(expected, actual);
8042 });
8043
8044 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
8045 .await
8046 .unwrap();
8047 cx.run_until_parked();
8048
8049 project.read_with(cx, |project, cx| {
8050 let git_store = project.git_store().read(cx);
8051 assert_eq!(
8052 git_store.repository_and_path_for_project_path(
8053 &(tree_id, rel_path("dir1/src/b.txt")).into(),
8054 cx
8055 ),
8056 None
8057 );
8058 });
8059}
8060
8061#[gpui::test]
8062async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8063 init_test(cx);
8064 let fs = FakeFs::new(cx.background_executor.clone());
8065 let home = paths::home_dir();
8066 fs.insert_tree(
8067 home,
8068 json!({
8069 ".git": {},
8070 "project": {
8071 "a.txt": "A"
8072 },
8073 }),
8074 )
8075 .await;
8076
8077 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8078 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8079 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8080
8081 project
8082 .update(cx, |project, cx| project.git_scans_complete(cx))
8083 .await;
8084 tree.flush_fs_events(cx).await;
8085
8086 project.read_with(cx, |project, cx| {
8087 let containing = project
8088 .git_store()
8089 .read(cx)
8090 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8091 assert!(containing.is_none());
8092 });
8093
8094 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8095 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8096 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8097 project
8098 .update(cx, |project, cx| project.git_scans_complete(cx))
8099 .await;
8100 tree.flush_fs_events(cx).await;
8101
8102 project.read_with(cx, |project, cx| {
8103 let containing = project
8104 .git_store()
8105 .read(cx)
8106 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8107 assert_eq!(
8108 containing
8109 .unwrap()
8110 .0
8111 .read(cx)
8112 .work_directory_abs_path
8113 .as_ref(),
8114 home,
8115 );
8116 });
8117}
8118
// End-to-end check of git status tracking against a real (git2) repository:
// statuses observed at startup, after working-copy edits, after committing,
// and after deleting tracked and untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses RealFs and real git operations, so blocking is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce a worktree deletion (d.txt) and a worktree modification (a.txt).
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        // c.txt is unchanged and therefore has no status entry.
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify the previously-unchanged tracked file; it should gain a status.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the outstanding changes (and drop d.txt from the index), which
    // should clear all the statuses above.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8248
8249#[gpui::test]
8250async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
8251 init_test(cx);
8252 cx.executor().allow_parking();
8253
8254 let root = TempTree::new(json!({
8255 "project": {
8256 "sub": {},
8257 "a.txt": "",
8258 },
8259 }));
8260
8261 let work_dir = root.path().join("project");
8262 let repo = git_init(work_dir.as_path());
8263 // a.txt exists in HEAD and the working copy but is deleted in the index.
8264 git_add("a.txt", &repo);
8265 git_commit("Initial commit", &repo);
8266 git_remove_index("a.txt".as_ref(), &repo);
8267 // `sub` is a nested git repository.
8268 let _sub = git_init(&work_dir.join("sub"));
8269
8270 let project = Project::test(
8271 Arc::new(RealFs::new(None, cx.executor())),
8272 [root.path()],
8273 cx,
8274 )
8275 .await;
8276
8277 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8278 tree.flush_fs_events(cx).await;
8279 project
8280 .update(cx, |project, cx| project.git_scans_complete(cx))
8281 .await;
8282 cx.executor().run_until_parked();
8283
8284 let repository = project.read_with(cx, |project, cx| {
8285 project
8286 .repositories(cx)
8287 .values()
8288 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
8289 .unwrap()
8290 .clone()
8291 });
8292
8293 repository.read_with(cx, |repository, _cx| {
8294 let entries = repository.cached_status().collect::<Vec<_>>();
8295
8296 // `sub` doesn't appear in our computed statuses.
8297 // a.txt appears with a combined `DA` status.
8298 assert_eq!(
8299 entries,
8300 [StatusEntry {
8301 repo_path: repo_path("a.txt"),
8302 status: TrackedStatus {
8303 index_status: StatusCode::Deleted,
8304 worktree_status: StatusCode::Added
8305 }
8306 .into(),
8307 }]
8308 )
8309 });
8310}
8311
8312#[gpui::test]
8313async fn test_repository_subfolder_git_status(
8314 executor: gpui::BackgroundExecutor,
8315 cx: &mut gpui::TestAppContext,
8316) {
8317 init_test(cx);
8318
8319 let fs = FakeFs::new(executor);
8320 fs.insert_tree(
8321 path!("/root"),
8322 json!({
8323 "my-repo": {
8324 ".git": {},
8325 "a.txt": "a",
8326 "sub-folder-1": {
8327 "sub-folder-2": {
8328 "c.txt": "cc",
8329 "d": {
8330 "e.txt": "eee"
8331 }
8332 },
8333 }
8334 },
8335 }),
8336 )
8337 .await;
8338
8339 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8340 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8341
8342 fs.set_status_for_repo(
8343 path!("/root/my-repo/.git").as_ref(),
8344 &[(E_TXT, FileStatus::Untracked)],
8345 );
8346
8347 let project = Project::test(
8348 fs.clone(),
8349 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8350 cx,
8351 )
8352 .await;
8353
8354 project
8355 .update(cx, |project, cx| project.git_scans_complete(cx))
8356 .await;
8357 cx.run_until_parked();
8358
8359 let repository = project.read_with(cx, |project, cx| {
8360 project.repositories(cx).values().next().unwrap().clone()
8361 });
8362
8363 // Ensure that the git status is loaded correctly
8364 repository.read_with(cx, |repository, _cx| {
8365 assert_eq!(
8366 repository.work_directory_abs_path,
8367 Path::new(path!("/root/my-repo")).into()
8368 );
8369
8370 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8371 assert_eq!(
8372 repository
8373 .status_for_path(&repo_path(E_TXT))
8374 .unwrap()
8375 .status,
8376 FileStatus::Untracked
8377 );
8378 });
8379
8380 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8381 project
8382 .update(cx, |project, cx| project.git_scans_complete(cx))
8383 .await;
8384 cx.run_until_parked();
8385
8386 repository.read_with(cx, |repository, _cx| {
8387 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8388 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
8389 });
8390}
8391
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Disabled via `#[cfg(any())]` (which is never true) until the flakiness is resolved.
//
// Exercises merge-conflict tracking across a conflicted cherry-pick: the
// conflicted path should appear in `merge_conflicts` while CHERRY_PICK_HEAD
// exists, and disappear once the cherry-pick is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a commit on a second branch that conflicts with main.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Diverge main, then cherry-pick the conflicting commit onto it.
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    // Sanity-check that git is actually mid-cherry-pick with a conflict.
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be tracked as a merge conflict.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // Once resolved, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8474
// Verifies that editing a .gitignore file updates both the ignored flag on
// worktree entries and the git statuses of newly (un)ignored files.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree: .gitignore and a.xml are committed; b.txt is
    // ignored by the `*.txt` rule.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    // After the rescan, the ignored flags are swapped and the staged file
    // shows as Added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8542
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// Verifies that renaming a repository's work directory preserves the tracked
// statuses and updates `work_directory_abs_path`.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: modified `a`, untracked `b`, workdir at project1.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // Statuses are unchanged; only the work directory path moved.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8624
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// Broad end-to-end test of per-file git status tracking against a real
// repository: startup state, working-copy edits, commits, resets/stashes,
// deletions, .gitignore changes, and directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they show as untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // The committed files now have no status entries.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt's changes were stashed, so it's clean again.
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        // b.txt was removed from the index, so it is untracked again.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules, then commit the new ignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a new untracked file inside a nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the file
    // to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8853
8854#[gpui::test]
8855#[ignore]
8856async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8857 init_test(cx);
8858 cx.executor().allow_parking();
8859
8860 const IGNORE_RULE: &str = "**/target";
8861
8862 let root = TempTree::new(json!({
8863 "project": {
8864 "src": {
8865 "main.rs": "fn main() {}"
8866 },
8867 "target": {
8868 "debug": {
8869 "important_text.txt": "important text",
8870 },
8871 },
8872 ".gitignore": IGNORE_RULE
8873 },
8874
8875 }));
8876 let root_path = root.path();
8877
8878 // Set up git repository before creating the worktree.
8879 let work_dir = root.path().join("project");
8880 let repo = git_init(work_dir.as_path());
8881 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8882 git_add("src/main.rs", &repo);
8883 git_add(".gitignore", &repo);
8884 git_commit("Initial commit", &repo);
8885
8886 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8887 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8888 let project_events = Arc::new(Mutex::new(Vec::new()));
8889 project.update(cx, |project, cx| {
8890 let repo_events = repository_updates.clone();
8891 cx.subscribe(project.git_store(), move |_, _, e, _| {
8892 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8893 repo_events.lock().push(e.clone());
8894 }
8895 })
8896 .detach();
8897 let project_events = project_events.clone();
8898 cx.subscribe_self(move |_, e, _| {
8899 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8900 project_events.lock().extend(
8901 updates
8902 .iter()
8903 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
8904 .filter(|(path, _)| path != "fs-event-sentinel"),
8905 );
8906 }
8907 })
8908 .detach();
8909 });
8910
8911 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8912 tree.flush_fs_events(cx).await;
8913 tree.update(cx, |tree, cx| {
8914 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
8915 })
8916 .await
8917 .unwrap();
8918 tree.update(cx, |tree, _| {
8919 assert_eq!(
8920 tree.entries(true, 0)
8921 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8922 .collect::<Vec<_>>(),
8923 vec![
8924 (rel_path(""), false),
8925 (rel_path("project/"), false),
8926 (rel_path("project/.gitignore"), false),
8927 (rel_path("project/src"), false),
8928 (rel_path("project/src/main.rs"), false),
8929 (rel_path("project/target"), true),
8930 (rel_path("project/target/debug"), true),
8931 (rel_path("project/target/debug/important_text.txt"), true),
8932 ]
8933 );
8934 });
8935
8936 assert_eq!(
8937 repository_updates.lock().drain(..).collect::<Vec<_>>(),
8938 vec![
8939 RepositoryEvent::Updated {
8940 full_scan: true,
8941 new_instance: false,
8942 },
8943 RepositoryEvent::MergeHeadsChanged,
8944 ],
8945 "Initial worktree scan should produce a repo update event"
8946 );
8947 assert_eq!(
8948 project_events.lock().drain(..).collect::<Vec<_>>(),
8949 vec![
8950 ("project/target".to_string(), PathChange::Loaded),
8951 ("project/target/debug".to_string(), PathChange::Loaded),
8952 (
8953 "project/target/debug/important_text.txt".to_string(),
8954 PathChange::Loaded
8955 ),
8956 ],
8957 "Initial project changes should show that all not-ignored and all opened files are loaded"
8958 );
8959
8960 let deps_dir = work_dir.join("target").join("debug").join("deps");
8961 std::fs::create_dir_all(&deps_dir).unwrap();
8962 tree.flush_fs_events(cx).await;
8963 project
8964 .update(cx, |project, cx| project.git_scans_complete(cx))
8965 .await;
8966 cx.executor().run_until_parked();
8967 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
8968 tree.flush_fs_events(cx).await;
8969 project
8970 .update(cx, |project, cx| project.git_scans_complete(cx))
8971 .await;
8972 cx.executor().run_until_parked();
8973 std::fs::remove_dir_all(&deps_dir).unwrap();
8974 tree.flush_fs_events(cx).await;
8975 project
8976 .update(cx, |project, cx| project.git_scans_complete(cx))
8977 .await;
8978 cx.executor().run_until_parked();
8979
8980 tree.update(cx, |tree, _| {
8981 assert_eq!(
8982 tree.entries(true, 0)
8983 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8984 .collect::<Vec<_>>(),
8985 vec![
8986 (rel_path(""), false),
8987 (rel_path("project/"), false),
8988 (rel_path("project/.gitignore"), false),
8989 (rel_path("project/src"), false),
8990 (rel_path("project/src/main.rs"), false),
8991 (rel_path("project/target"), true),
8992 (rel_path("project/target/debug"), true),
8993 (rel_path("project/target/debug/important_text.txt"), true),
8994 ],
8995 "No stray temp files should be left after the flycheck changes"
8996 );
8997 });
8998
8999 assert_eq!(
9000 repository_updates
9001 .lock()
9002 .iter()
9003 .filter(|update| !matches!(update, RepositoryEvent::PathsChanged))
9004 .cloned()
9005 .collect::<Vec<_>>(),
9006 Vec::new(),
9007 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9008 );
9009 assert_eq!(
9010 project_events.lock().as_slice(),
9011 vec![
9012 ("project/target/debug/deps".to_string(), PathChange::Added),
9013 ("project/target/debug/deps".to_string(), PathChange::Removed),
9014 ],
9015 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9016 No updates for more nested directories should happen as those are ignored",
9017 );
9018}
9019
9020#[gpui::test]
9021async fn test_odd_events_for_ignored_dirs(
9022 executor: BackgroundExecutor,
9023 cx: &mut gpui::TestAppContext,
9024) {
9025 init_test(cx);
9026 let fs = FakeFs::new(executor);
9027 fs.insert_tree(
9028 path!("/root"),
9029 json!({
9030 ".git": {},
9031 ".gitignore": "**/target/",
9032 "src": {
9033 "main.rs": "fn main() {}",
9034 },
9035 "target": {
9036 "debug": {
9037 "foo.txt": "foo",
9038 "deps": {}
9039 }
9040 }
9041 }),
9042 )
9043 .await;
9044 fs.set_head_and_index_for_repo(
9045 path!("/root/.git").as_ref(),
9046 &[
9047 (".gitignore", "**/target/".into()),
9048 ("src/main.rs", "fn main() {}".into()),
9049 ],
9050 );
9051
9052 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9053 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9054 let project_events = Arc::new(Mutex::new(Vec::new()));
9055 project.update(cx, |project, cx| {
9056 let repository_updates = repository_updates.clone();
9057 cx.subscribe(project.git_store(), move |_, _, e, _| {
9058 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9059 repository_updates.lock().push(e.clone());
9060 }
9061 })
9062 .detach();
9063 let project_events = project_events.clone();
9064 cx.subscribe_self(move |_, e, _| {
9065 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9066 project_events.lock().extend(
9067 updates
9068 .iter()
9069 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9070 .filter(|(path, _)| path != "fs-event-sentinel"),
9071 );
9072 }
9073 })
9074 .detach();
9075 });
9076
9077 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9078 tree.update(cx, |tree, cx| {
9079 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9080 })
9081 .await
9082 .unwrap();
9083 tree.flush_fs_events(cx).await;
9084 project
9085 .update(cx, |project, cx| project.git_scans_complete(cx))
9086 .await;
9087 cx.run_until_parked();
9088 tree.update(cx, |tree, _| {
9089 assert_eq!(
9090 tree.entries(true, 0)
9091 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9092 .collect::<Vec<_>>(),
9093 vec![
9094 (rel_path(""), false),
9095 (rel_path(".gitignore"), false),
9096 (rel_path("src"), false),
9097 (rel_path("src/main.rs"), false),
9098 (rel_path("target"), true),
9099 (rel_path("target/debug"), true),
9100 (rel_path("target/debug/deps"), true),
9101 (rel_path("target/debug/foo.txt"), true),
9102 ]
9103 );
9104 });
9105
9106 assert_eq!(
9107 repository_updates
9108 .lock()
9109 .drain(..)
9110 .filter(|update| !matches!(update, RepositoryEvent::PathsChanged))
9111 .collect::<Vec<_>>(),
9112 vec![
9113 RepositoryEvent::Updated {
9114 full_scan: true,
9115 new_instance: false,
9116 },
9117 RepositoryEvent::MergeHeadsChanged,
9118 ],
9119 "Initial worktree scan should produce a repo update event"
9120 );
9121 assert_eq!(
9122 project_events.lock().drain(..).collect::<Vec<_>>(),
9123 vec![
9124 ("target".to_string(), PathChange::Loaded),
9125 ("target/debug".to_string(), PathChange::Loaded),
9126 ("target/debug/deps".to_string(), PathChange::Loaded),
9127 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9128 ],
9129 "All non-ignored entries and all opened firs should be getting a project event",
9130 );
9131
9132 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9133 // This may happen multiple times during a single flycheck, but once is enough for testing.
9134 fs.emit_fs_event("/root/target/debug/deps", None);
9135 tree.flush_fs_events(cx).await;
9136 project
9137 .update(cx, |project, cx| project.git_scans_complete(cx))
9138 .await;
9139 cx.executor().run_until_parked();
9140
9141 assert_eq!(
9142 repository_updates
9143 .lock()
9144 .iter()
9145 .filter(|update| !matches!(update, RepositoryEvent::PathsChanged))
9146 .cloned()
9147 .collect::<Vec<_>>(),
9148 Vec::new(),
9149 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9150 );
9151 assert_eq!(
9152 project_events.lock().as_slice(),
9153 Vec::new(),
9154 "No further project events should happen, as only ignored dirs received FS events",
9155 );
9156}
9157
9158#[gpui::test]
9159async fn test_repos_in_invisible_worktrees(
9160 executor: BackgroundExecutor,
9161 cx: &mut gpui::TestAppContext,
9162) {
9163 init_test(cx);
9164 let fs = FakeFs::new(executor);
9165 fs.insert_tree(
9166 path!("/root"),
9167 json!({
9168 "dir1": {
9169 ".git": {},
9170 "dep1": {
9171 ".git": {},
9172 "src": {
9173 "a.txt": "",
9174 },
9175 },
9176 "b.txt": "",
9177 },
9178 }),
9179 )
9180 .await;
9181
9182 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
9183 let _visible_worktree =
9184 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9185 project
9186 .update(cx, |project, cx| project.git_scans_complete(cx))
9187 .await;
9188
9189 let repos = project.read_with(cx, |project, cx| {
9190 project
9191 .repositories(cx)
9192 .values()
9193 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9194 .collect::<Vec<_>>()
9195 });
9196 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9197
9198 let (_invisible_worktree, _) = project
9199 .update(cx, |project, cx| {
9200 project.worktree_store.update(cx, |worktree_store, cx| {
9201 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
9202 })
9203 })
9204 .await
9205 .expect("failed to create worktree");
9206 project
9207 .update(cx, |project, cx| project.git_scans_complete(cx))
9208 .await;
9209
9210 let repos = project.read_with(cx, |project, cx| {
9211 project
9212 .repositories(cx)
9213 .values()
9214 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9215 .collect::<Vec<_>>()
9216 });
9217 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9218}
9219
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear the default file-scan exclusions so the `.git` directory itself
    // appears as a worktree entry (asserted at the end of the test).
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // `/root/.gitignore` sits *above* the repo at `/root/tree` and matches the
    // `ancestor-ignored-file*` names; the repo's own `.gitignore` ignores
    // `ignored-dir`.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // HEAD and index agree: `.gitignore` and `tracked-file1` are committed and
    // unmodified, so they should report no status below.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force a scan of the ignored directory so its entries are present in the
    // worktree snapshot before we assert on them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Unmodified tracked files and ignored files all report no git status;
    // only the entry under `ignored-dir` is flagged as ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new file and stage it (added to the index but not to HEAD),
    // plus two new files covered by the ancestor and repo gitignore rules.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged file reports as Added in the index; the new ignored files
        // still have no status.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // With file-scan exclusions cleared, `.git` is present as an entry and
        // is itself marked ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9360
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Lay out a primary repo at /project with:
    // - a linked git worktree at /project/some-worktree, whose `.git` is a
    //   `gitdir:` pointer file into `.git/worktrees/some-worktree`, and
    // - a submodule at /project/subdir/some-submodule, whose `.git` points
    //   into `.git/modules/subdir/some-submodule`.
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three working directories (primary repo, linked worktree, submodule)
    // should be discovered as distinct repositories.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            // HEAD and index now contain "b" while the file on disk is "B",
            // so src/b.txt becomes modified in the working tree.
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository, not the
    // primary repo that contains it on disk.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9516
9517#[gpui::test]
9518async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
9519 init_test(cx);
9520 let fs = FakeFs::new(cx.background_executor.clone());
9521 fs.insert_tree(
9522 path!("/root"),
9523 json!({
9524 "project": {
9525 ".git": {},
9526 "child1": {
9527 "a.txt": "A",
9528 },
9529 "child2": {
9530 "b.txt": "B",
9531 }
9532 }
9533 }),
9534 )
9535 .await;
9536
9537 let project = Project::test(
9538 fs.clone(),
9539 [
9540 path!("/root/project/child1").as_ref(),
9541 path!("/root/project/child2").as_ref(),
9542 ],
9543 cx,
9544 )
9545 .await;
9546
9547 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9548 tree.flush_fs_events(cx).await;
9549 project
9550 .update(cx, |project, cx| project.git_scans_complete(cx))
9551 .await;
9552 cx.executor().run_until_parked();
9553
9554 let repos = project.read_with(cx, |project, cx| {
9555 project
9556 .repositories(cx)
9557 .values()
9558 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9559 .collect::<Vec<_>>()
9560 });
9561 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
9562}
9563
9564async fn search(
9565 project: &Entity<Project>,
9566 query: SearchQuery,
9567 cx: &mut gpui::TestAppContext,
9568) -> Result<HashMap<String, Vec<Range<usize>>>> {
9569 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9570 let mut results = HashMap::default();
9571 while let Ok(search_result) = search_rx.recv().await {
9572 match search_result {
9573 SearchResult::Buffer { buffer, ranges } => {
9574 results.entry(buffer).or_insert(ranges);
9575 }
9576 SearchResult::LimitReached => {}
9577 }
9578 }
9579 Ok(results
9580 .into_iter()
9581 .map(|(buffer, ranges)| {
9582 buffer.update(cx, |buffer, cx| {
9583 let path = buffer
9584 .file()
9585 .unwrap()
9586 .full_path(cx)
9587 .to_string_lossy()
9588 .to_string();
9589 let ranges = ranges
9590 .into_iter()
9591 .map(|range| range.to_offset(buffer))
9592 .collect::<Vec<_>>();
9593 (path, ranges)
9594 })
9595 })
9596 .collect())
9597}
9598
/// Shared test setup: installs a test `SettingsStore` and initializes the
/// subsystems these tests depend on. The settings store must be registered
/// before the other init calls, which read global settings.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9610
9611fn json_lang() -> Arc<Language> {
9612 Arc::new(Language::new(
9613 LanguageConfig {
9614 name: "JSON".into(),
9615 matcher: LanguageMatcher {
9616 path_suffixes: vec!["json".to_string()],
9617 ..Default::default()
9618 },
9619 ..Default::default()
9620 },
9621 None,
9622 ))
9623}
9624
9625fn js_lang() -> Arc<Language> {
9626 Arc::new(Language::new(
9627 LanguageConfig {
9628 name: "JavaScript".into(),
9629 matcher: LanguageMatcher {
9630 path_suffixes: vec!["js".to_string()],
9631 ..Default::default()
9632 },
9633 ..Default::default()
9634 },
9635 None,
9636 ))
9637}
9638
9639fn rust_lang() -> Arc<Language> {
9640 Arc::new(Language::new(
9641 LanguageConfig {
9642 name: "Rust".into(),
9643 matcher: LanguageMatcher {
9644 path_suffixes: vec!["rs".to_string()],
9645 ..Default::default()
9646 },
9647 ..Default::default()
9648 },
9649 Some(tree_sitter_rust::LANGUAGE.into()),
9650 ))
9651}
9652
9653fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
9654 struct PythonMootToolchainLister(Arc<FakeFs>);
9655 #[async_trait]
9656 impl ToolchainLister for PythonMootToolchainLister {
9657 async fn list(
9658 &self,
9659 worktree_root: PathBuf,
9660 subroot_relative_path: Arc<RelPath>,
9661 _: Option<HashMap<String, String>>,
9662 _: &dyn Fs,
9663 ) -> ToolchainList {
9664 // This lister will always return a path .venv directories within ancestors
9665 let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
9666 let mut toolchains = vec![];
9667 for ancestor in ancestors {
9668 let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
9669 if self.0.is_dir(&venv_path).await {
9670 toolchains.push(Toolchain {
9671 name: SharedString::new("Python Venv"),
9672 path: venv_path.to_string_lossy().into_owned().into(),
9673 language_name: LanguageName(SharedString::new_static("Python")),
9674 as_json: serde_json::Value::Null,
9675 })
9676 }
9677 }
9678 ToolchainList {
9679 toolchains,
9680 ..Default::default()
9681 }
9682 }
9683 async fn resolve(
9684 &self,
9685 _: PathBuf,
9686 _: Option<HashMap<String, String>>,
9687 _: &dyn Fs,
9688 ) -> anyhow::Result<Toolchain> {
9689 Err(anyhow::anyhow!("Not implemented"))
9690 }
9691 fn meta(&self) -> ToolchainMetadata {
9692 ToolchainMetadata {
9693 term: SharedString::new_static("Virtual Environment"),
9694 new_toolchain_placeholder: SharedString::new_static(
9695 "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
9696 ),
9697 manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
9698 }
9699 }
9700 fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
9701 vec![]
9702 }
9703 }
9704 Arc::new(
9705 Language::new(
9706 LanguageConfig {
9707 name: "Python".into(),
9708 matcher: LanguageMatcher {
9709 path_suffixes: vec!["py".to_string()],
9710 ..Default::default()
9711 },
9712 ..Default::default()
9713 },
9714 None, // We're not testing Python parsing with this language.
9715 )
9716 .with_manifest(Some(ManifestName::from(SharedString::new_static(
9717 "pyproject.toml",
9718 ))))
9719 .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
9720 )
9721}
9722
9723fn typescript_lang() -> Arc<Language> {
9724 Arc::new(Language::new(
9725 LanguageConfig {
9726 name: "TypeScript".into(),
9727 matcher: LanguageMatcher {
9728 path_suffixes: vec!["ts".to_string()],
9729 ..Default::default()
9730 },
9731 ..Default::default()
9732 },
9733 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9734 ))
9735}
9736
9737fn tsx_lang() -> Arc<Language> {
9738 Arc::new(Language::new(
9739 LanguageConfig {
9740 name: "tsx".into(),
9741 matcher: LanguageMatcher {
9742 path_suffixes: vec!["tsx".to_string()],
9743 ..Default::default()
9744 },
9745 ..Default::default()
9746 },
9747 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9748 ))
9749}
9750
9751fn get_all_tasks(
9752 project: &Entity<Project>,
9753 task_contexts: Arc<TaskContexts>,
9754 cx: &mut App,
9755) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9756 let new_tasks = project.update(cx, |project, cx| {
9757 project.task_store.update(cx, |task_store, cx| {
9758 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9759 this.used_and_current_resolved_tasks(task_contexts, cx)
9760 })
9761 })
9762 });
9763
9764 cx.background_spawn(async move {
9765 let (mut old, new) = new_tasks.await;
9766 old.extend(new);
9767 old
9768 })
9769}
9770
9771#[track_caller]
9772fn assert_entry_git_state(
9773 tree: &Worktree,
9774 repository: &Repository,
9775 path: &str,
9776 index_status: Option<StatusCode>,
9777 is_ignored: bool,
9778) {
9779 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9780 let entry = tree
9781 .entry_for_path(&rel_path(path))
9782 .unwrap_or_else(|| panic!("entry {path} not found"));
9783 let status = repository
9784 .status_for_path(&repo_path(path))
9785 .map(|entry| entry.status);
9786 let expected = index_status.map(|index_status| {
9787 TrackedStatus {
9788 index_status,
9789 worktree_status: StatusCode::Unmodified,
9790 }
9791 .into()
9792 });
9793 assert_eq!(
9794 status, expected,
9795 "expected {path} to have git status: {expected:?}"
9796 );
9797 assert_eq!(
9798 entry.is_ignored, is_ignored,
9799 "expected {path} to have is_ignored: {is_ignored}"
9800 );
9801}
9802
9803#[track_caller]
9804fn git_init(path: &Path) -> git2::Repository {
9805 let mut init_opts = RepositoryInitOptions::new();
9806 init_opts.initial_head("main");
9807 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9808}
9809
9810#[track_caller]
9811fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9812 let path = path.as_ref();
9813 let mut index = repo.index().expect("Failed to get index");
9814 index.add_path(path).expect("Failed to add file");
9815 index.write().expect("Failed to write index");
9816}
9817
9818#[track_caller]
9819fn git_remove_index(path: &Path, repo: &git2::Repository) {
9820 let mut index = repo.index().expect("Failed to get index");
9821 index.remove_path(path).expect("Failed to add file");
9822 index.write().expect("Failed to write index");
9823}
9824
9825#[track_caller]
9826fn git_commit(msg: &'static str, repo: &git2::Repository) {
9827 use git2::Signature;
9828
9829 let signature = Signature::now("test", "test@zed.dev").unwrap();
9830 let oid = repo.index().unwrap().write_tree().unwrap();
9831 let tree = repo.find_tree(oid).unwrap();
9832 if let Ok(head) = repo.head() {
9833 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9834
9835 let parent_commit = parent_obj.as_commit().unwrap();
9836
9837 repo.commit(
9838 Some("HEAD"),
9839 &signature,
9840 &signature,
9841 msg,
9842 &tree,
9843 &[parent_commit],
9844 )
9845 .expect("Failed to commit with parent");
9846 } else {
9847 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9848 .expect("Failed to commit");
9849 }
9850}
9851
// Currently unused (compiled out via `cfg(any())`), kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    // Apply the given commit onto the current HEAD with default options.
    let result = repo.cherrypick(commit, None);
    result.expect("Failed to cherrypick");
}
9857
9858#[track_caller]
9859fn git_stash(repo: &mut git2::Repository) {
9860 use git2::Signature;
9861
9862 let signature = Signature::now("test", "test@zed.dev").unwrap();
9863 repo.stash_save(&signature, "N/A", None)
9864 .expect("Failed to stash");
9865}
9866
9867#[track_caller]
9868fn git_reset(offset: usize, repo: &git2::Repository) {
9869 let head = repo.head().expect("Couldn't get repo head");
9870 let object = head.peel(git2::ObjectType::Commit).unwrap();
9871 let commit = object.as_commit().unwrap();
9872 let new_head = commit
9873 .parents()
9874 .inspect(|parnet| {
9875 parnet.message();
9876 })
9877 .nth(offset)
9878 .expect("Not enough history");
9879 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9880 .expect("Could not reset");
9881}
9882
// Currently unused (compiled out via `cfg(any())`), kept for future tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` pointing at the current HEAD commit, without force.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed: the expect message previously read "Failed to commit", which was
    // misleading — this call creates a branch, it does not commit.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9893
// Currently unused (compiled out via `cfg(any())`), kept for future tests.
// Points HEAD at `name` (a refname like "refs/heads/main"), then syncs the
// working tree to match the new HEAD.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9900
// Currently unused (compiled out via `cfg(any())`), kept for future tests.
// Snapshots the repository's status as a map of path -> status flags.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut result = collections::HashMap::default();
    for entry in statuses.iter() {
        result.insert(entry.path().unwrap().to_string(), entry.status());
    }
    result
}
9910
9911#[gpui::test]
9912async fn test_find_project_path_abs(
9913 background_executor: BackgroundExecutor,
9914 cx: &mut gpui::TestAppContext,
9915) {
9916 // find_project_path should work with absolute paths
9917 init_test(cx);
9918
9919 let fs = FakeFs::new(background_executor);
9920 fs.insert_tree(
9921 path!("/root"),
9922 json!({
9923 "project1": {
9924 "file1.txt": "content1",
9925 "subdir": {
9926 "file2.txt": "content2"
9927 }
9928 },
9929 "project2": {
9930 "file3.txt": "content3"
9931 }
9932 }),
9933 )
9934 .await;
9935
9936 let project = Project::test(
9937 fs.clone(),
9938 [
9939 path!("/root/project1").as_ref(),
9940 path!("/root/project2").as_ref(),
9941 ],
9942 cx,
9943 )
9944 .await;
9945
9946 // Make sure the worktrees are fully initialized
9947 project
9948 .update(cx, |project, cx| project.git_scans_complete(cx))
9949 .await;
9950 cx.run_until_parked();
9951
9952 let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
9953 project.read_with(cx, |project, cx| {
9954 let worktrees: Vec<_> = project.worktrees(cx).collect();
9955 let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
9956 let id1 = worktrees[0].read(cx).id();
9957 let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
9958 let id2 = worktrees[1].read(cx).id();
9959 (abs_path1, id1, abs_path2, id2)
9960 });
9961
9962 project.update(cx, |project, cx| {
9963 let abs_path = project1_abs_path.join("file1.txt");
9964 let found_path = project.find_project_path(abs_path, cx).unwrap();
9965 assert_eq!(found_path.worktree_id, project1_id);
9966 assert_eq!(&*found_path.path, rel_path("file1.txt"));
9967
9968 let abs_path = project1_abs_path.join("subdir").join("file2.txt");
9969 let found_path = project.find_project_path(abs_path, cx).unwrap();
9970 assert_eq!(found_path.worktree_id, project1_id);
9971 assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));
9972
9973 let abs_path = project2_abs_path.join("file3.txt");
9974 let found_path = project.find_project_path(abs_path, cx).unwrap();
9975 assert_eq!(found_path.worktree_id, project2_id);
9976 assert_eq!(&*found_path.path, rel_path("file3.txt"));
9977
9978 let abs_path = project1_abs_path.join("nonexistent.txt");
9979 let found_path = project.find_project_path(abs_path, cx);
9980 assert!(
9981 found_path.is_some(),
9982 "Should find project path for nonexistent file in worktree"
9983 );
9984
9985 // Test with an absolute path outside any worktree
9986 let abs_path = Path::new("/some/other/path");
9987 let found_path = project.find_project_path(abs_path, cx);
9988 assert!(
9989 found_path.is_none(),
9990 "Should not find project path for path outside any worktree"
9991 );
9992 });
9993}