1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
97// NOTE:
98// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
99// we assume that they are not supported out of the box.
100#[cfg(not(windows))]
101#[gpui::test]
102async fn test_symlinks(cx: &mut gpui::TestAppContext) {
103 init_test(cx);
104 cx.executor().allow_parking();
105
106 let dir = TempTree::new(json!({
107 "root": {
108 "apple": "",
109 "banana": {
110 "carrot": {
111 "date": "",
112 "endive": "",
113 }
114 },
115 "fennel": {
116 "grape": "",
117 }
118 }
119 }));
120
121 let root_link_path = dir.path().join("root_link");
122 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
123 os::unix::fs::symlink(
124 dir.path().join("root/fennel"),
125 dir.path().join("root/finnochio"),
126 )
127 .unwrap();
128
129 let project = Project::test(
130 Arc::new(RealFs::new(None, cx.executor())),
131 [root_link_path.as_ref()],
132 cx,
133 )
134 .await;
135
136 project.update(cx, |project, cx| {
137 let tree = project.worktrees(cx).next().unwrap().read(cx);
138 assert_eq!(tree.file_count(), 5);
139 assert_eq!(
140 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
141 tree.entry_for_path(rel_path("finnochio/grape"))
142 .unwrap()
143 .inode
144 );
145 });
146}
147
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    // Verifies .editorconfig handling: it overrides .zed/settings.json, nested
    // .editorconfig files override parent ones, "tab_width" is used when
    // "indent_size" is absent, "off" values fall back to Zed settings, and
    // globs only affect matching files.
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolves the effective language settings for a worktree-relative path,
        // loading the language for the file so per-language overrides apply.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
246
247#[gpui::test]
248async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
249 init_test(cx);
250 cx.update(|cx| {
251 GitHostingProviderRegistry::default_global(cx);
252 git_hosting_providers::init(cx);
253 });
254
255 let fs = FakeFs::new(cx.executor());
256 let str_path = path!("/dir");
257 let path = Path::new(str_path);
258
259 fs.insert_tree(
260 path!("/dir"),
261 json!({
262 ".zed": {
263 "settings.json": r#"{
264 "git_hosting_providers": [
265 {
266 "provider": "gitlab",
267 "base_url": "https://google.com",
268 "name": "foo"
269 }
270 ]
271 }"#
272 },
273 }),
274 )
275 .await;
276
277 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
278 let (_worktree, _) =
279 project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
280 cx.executor().run_until_parked();
281
282 cx.update(|cx| {
283 let provider = GitHostingProviderRegistry::global(cx);
284 assert!(
285 provider
286 .list_hosting_providers()
287 .into_iter()
288 .any(|provider| provider.name() == "foo")
289 );
290 });
291
292 fs.atomic_write(
293 Path::new(path!("/dir/.zed/settings.json")).to_owned(),
294 "{}".into(),
295 )
296 .await
297 .unwrap();
298
299 cx.run_until_parked();
300
301 cx.update(|cx| {
302 let provider = GitHostingProviderRegistry::global(cx);
303 assert!(
304 !provider
305 .list_hosting_providers()
306 .into_iter()
307 .any(|provider| provider.name() == "foo")
308 );
309 });
310}
311
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Checks that worktree-local settings and tasks (from `.zed` directories at
    // different depths) are discovered and ordered, and that file-based global
    // tasks merge in after a task has been scheduled.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against a context rooted at the single worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings come from the nearest enclosing `.zed/settings.json`:
            // `a/a.rs` sees the root settings, `b/b.rs` sees `b/.zed`'s.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both worktree task files are discovered; the nested one sorts first.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the topmost task as most recently scheduled, and register a global
    // file-based task with an environment variable.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first; the global task sorts last
    // and carries its env through resolution.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
512
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    // A task referencing $ZED_WORKTREE_ROOT can only resolve when some context
    // (here, the active worktree's) supplies that variable.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With only an item context and no worktree context, ZED_WORKTREE_ROOT is
    // unavailable, so the task cannot be resolved.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // Once the active worktree context provides WorktreeRoot, the task resolves
    // with the variable substituted into its command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
604
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Two Python subprojects in one worktree share a single language server
    // instance until one subproject activates a distinct toolchain, at which
    // point a second server instance is spawned.

    // Manifest provider that roots subprojects at the nearest ancestor
    // directory containing a `pyproject.toml`.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up at most `depth` ancestors of `path`, returning the first one
        // that contains a `pyproject.toml` file.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two sibling subprojects, each with its own manifest and `.venv` directory.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first server instance.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b should reuse the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's manifest directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // With a distinct toolchain active for project-b, its buffer is now served
    // by a separate server instance.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
806
807#[gpui::test]
808async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
809 init_test(cx);
810
811 let fs = FakeFs::new(cx.executor());
812 fs.insert_tree(
813 path!("/dir"),
814 json!({
815 "test.rs": "const A: i32 = 1;",
816 "test2.rs": "",
817 "Cargo.toml": "a = 1",
818 "package.json": "{\"a\": 1}",
819 }),
820 )
821 .await;
822
823 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
824 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
825
826 let mut fake_rust_servers = language_registry.register_fake_lsp(
827 "Rust",
828 FakeLspAdapter {
829 name: "the-rust-language-server",
830 capabilities: lsp::ServerCapabilities {
831 completion_provider: Some(lsp::CompletionOptions {
832 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
833 ..Default::default()
834 }),
835 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
836 lsp::TextDocumentSyncOptions {
837 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
838 ..Default::default()
839 },
840 )),
841 ..Default::default()
842 },
843 ..Default::default()
844 },
845 );
846 let mut fake_json_servers = language_registry.register_fake_lsp(
847 "JSON",
848 FakeLspAdapter {
849 name: "the-json-language-server",
850 capabilities: lsp::ServerCapabilities {
851 completion_provider: Some(lsp::CompletionOptions {
852 trigger_characters: Some(vec![":".to_string()]),
853 ..Default::default()
854 }),
855 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
856 lsp::TextDocumentSyncOptions {
857 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
858 ..Default::default()
859 },
860 )),
861 ..Default::default()
862 },
863 ..Default::default()
864 },
865 );
866
867 // Open a buffer without an associated language server.
868 let (toml_buffer, _handle) = project
869 .update(cx, |project, cx| {
870 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
871 })
872 .await
873 .unwrap();
874
875 // Open a buffer with an associated language server before the language for it has been loaded.
876 let (rust_buffer, _handle2) = project
877 .update(cx, |project, cx| {
878 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
879 })
880 .await
881 .unwrap();
882 rust_buffer.update(cx, |buffer, _| {
883 assert_eq!(buffer.language().map(|l| l.name()), None);
884 });
885
886 // Now we add the languages to the project, and ensure they get assigned to all
887 // the relevant open buffers.
888 language_registry.add(json_lang());
889 language_registry.add(rust_lang());
890 cx.executor().run_until_parked();
891 rust_buffer.update(cx, |buffer, _| {
892 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
893 });
894
895 // A server is started up, and it is notified about Rust files.
896 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
897 assert_eq!(
898 fake_rust_server
899 .receive_notification::<lsp::notification::DidOpenTextDocument>()
900 .await
901 .text_document,
902 lsp::TextDocumentItem {
903 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
904 version: 0,
905 text: "const A: i32 = 1;".to_string(),
906 language_id: "rust".to_string(),
907 }
908 );
909
910 // The buffer is configured based on the language server's capabilities.
911 rust_buffer.update(cx, |buffer, _| {
912 assert_eq!(
913 buffer
914 .completion_triggers()
915 .iter()
916 .cloned()
917 .collect::<Vec<_>>(),
918 &[".".to_string(), "::".to_string()]
919 );
920 });
921 toml_buffer.update(cx, |buffer, _| {
922 assert!(buffer.completion_triggers().is_empty());
923 });
924
925 // Edit a buffer. The changes are reported to the language server.
926 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
927 assert_eq!(
928 fake_rust_server
929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
930 .await
931 .text_document,
932 lsp::VersionedTextDocumentIdentifier::new(
933 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
934 1
935 )
936 );
937
938 // Open a third buffer with a different associated language server.
939 let (json_buffer, _json_handle) = project
940 .update(cx, |project, cx| {
941 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
942 })
943 .await
944 .unwrap();
945
946 // A json language server is started up and is only notified about the json buffer.
947 let mut fake_json_server = fake_json_servers.next().await.unwrap();
948 assert_eq!(
949 fake_json_server
950 .receive_notification::<lsp::notification::DidOpenTextDocument>()
951 .await
952 .text_document,
953 lsp::TextDocumentItem {
954 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
955 version: 0,
956 text: "{\"a\": 1}".to_string(),
957 language_id: "json".to_string(),
958 }
959 );
960
961 // This buffer is configured based on the second language server's
962 // capabilities.
963 json_buffer.update(cx, |buffer, _| {
964 assert_eq!(
965 buffer
966 .completion_triggers()
967 .iter()
968 .cloned()
969 .collect::<Vec<_>>(),
970 &[":".to_string()]
971 );
972 });
973
974 // When opening another buffer whose language server is already running,
975 // it is also configured based on the existing language server's capabilities.
976 let (rust_buffer2, _handle4) = project
977 .update(cx, |project, cx| {
978 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
979 })
980 .await
981 .unwrap();
982 rust_buffer2.update(cx, |buffer, _| {
983 assert_eq!(
984 buffer
985 .completion_triggers()
986 .iter()
987 .cloned()
988 .collect::<Vec<_>>(),
989 &[".".to_string(), "::".to_string()]
990 );
991 });
992
993 // Changes are reported only to servers matching the buffer's language.
994 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
995 rust_buffer2.update(cx, |buffer, cx| {
996 buffer.edit([(0..0, "let x = 1;")], None, cx)
997 });
998 assert_eq!(
999 fake_rust_server
1000 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1001 .await
1002 .text_document,
1003 lsp::VersionedTextDocumentIdentifier::new(
1004 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1005 1
1006 )
1007 );
1008
1009 // Save notifications are reported to all servers.
1010 project
1011 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1012 .await
1013 .unwrap();
1014 assert_eq!(
1015 fake_rust_server
1016 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1017 .await
1018 .text_document,
1019 lsp::TextDocumentIdentifier::new(
1020 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1021 )
1022 );
1023 assert_eq!(
1024 fake_json_server
1025 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1026 .await
1027 .text_document,
1028 lsp::TextDocumentIdentifier::new(
1029 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1030 )
1031 );
1032
1033 // Renames are reported only to servers matching the buffer's language.
1034 fs.rename(
1035 Path::new(path!("/dir/test2.rs")),
1036 Path::new(path!("/dir/test3.rs")),
1037 Default::default(),
1038 )
1039 .await
1040 .unwrap();
1041 assert_eq!(
1042 fake_rust_server
1043 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1044 .await
1045 .text_document,
1046 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1047 );
1048 assert_eq!(
1049 fake_rust_server
1050 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1051 .await
1052 .text_document,
1053 lsp::TextDocumentItem {
1054 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1055 version: 0,
1056 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1057 language_id: "rust".to_string(),
1058 },
1059 );
1060
1061 rust_buffer2.update(cx, |buffer, cx| {
1062 buffer.update_diagnostics(
1063 LanguageServerId(0),
1064 DiagnosticSet::from_sorted_entries(
1065 vec![DiagnosticEntry {
1066 diagnostic: Default::default(),
1067 range: Anchor::MIN..Anchor::MAX,
1068 }],
1069 &buffer.snapshot(),
1070 ),
1071 cx,
1072 );
1073 assert_eq!(
1074 buffer
1075 .snapshot()
1076 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1077 .count(),
1078 1
1079 );
1080 });
1081
1082 // When the rename changes the extension of the file, the buffer gets closed on the old
1083 // language server and gets opened on the new one.
1084 fs.rename(
1085 Path::new(path!("/dir/test3.rs")),
1086 Path::new(path!("/dir/test3.json")),
1087 Default::default(),
1088 )
1089 .await
1090 .unwrap();
1091 assert_eq!(
1092 fake_rust_server
1093 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1094 .await
1095 .text_document,
1096 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1097 );
1098 assert_eq!(
1099 fake_json_server
1100 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1101 .await
1102 .text_document,
1103 lsp::TextDocumentItem {
1104 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1105 version: 0,
1106 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1107 language_id: "json".to_string(),
1108 },
1109 );
1110
1111 // We clear the diagnostics, since the language has changed.
1112 rust_buffer2.update(cx, |buffer, _| {
1113 assert_eq!(
1114 buffer
1115 .snapshot()
1116 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1117 .count(),
1118 0
1119 );
1120 });
1121
1122 // The renamed file's version resets after changing language server.
1123 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1124 assert_eq!(
1125 fake_json_server
1126 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1127 .await
1128 .text_document,
1129 lsp::VersionedTextDocumentIdentifier::new(
1130 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1131 1
1132 )
1133 );
1134
1135 // Restart language servers
1136 project.update(cx, |project, cx| {
1137 project.restart_language_servers_for_buffers(
1138 vec![rust_buffer.clone(), json_buffer.clone()],
1139 HashSet::default(),
1140 cx,
1141 );
1142 });
1143
1144 let mut rust_shutdown_requests = fake_rust_server
1145 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1146 let mut json_shutdown_requests = fake_json_server
1147 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1148 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1149
1150 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1151 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1152
1153 // Ensure rust document is reopened in new rust language server
1154 assert_eq!(
1155 fake_rust_server
1156 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1157 .await
1158 .text_document,
1159 lsp::TextDocumentItem {
1160 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1161 version: 0,
1162 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1163 language_id: "rust".to_string(),
1164 }
1165 );
1166
1167 // Ensure json documents are reopened in new json language server
1168 assert_set_eq!(
1169 [
1170 fake_json_server
1171 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1172 .await
1173 .text_document,
1174 fake_json_server
1175 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1176 .await
1177 .text_document,
1178 ],
1179 [
1180 lsp::TextDocumentItem {
1181 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1182 version: 0,
1183 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1184 language_id: "json".to_string(),
1185 },
1186 lsp::TextDocumentItem {
1187 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1188 version: 0,
1189 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1190 language_id: "json".to_string(),
1191 }
1192 ]
1193 );
1194
1195 // Close notifications are reported only to servers matching the buffer's language.
1196 cx.update(|_| drop(_json_handle));
1197 let close_message = lsp::DidCloseTextDocumentParams {
1198 text_document: lsp::TextDocumentIdentifier::new(
1199 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1200 ),
1201 };
1202 assert_eq!(
1203 fake_json_server
1204 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1205 .await,
1206 close_message,
1207 );
1208}
1209
/// Verifies that file-system events are forwarded to a language server
/// according to the glob watchers it registers via
/// `workspace/didChangeWatchedFiles` — including watchers that target
/// gitignored directories (which forces them to be loaded) and paths
/// outside the visible worktree.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Main worktree. `target` is listed in .gitignore, so its contents are
    // not scanned until something asks to watch them.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    // Trees entirely outside the worktree, reachable only through
    // LSP-initiated buffers or watch registrations.
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
            ]
        );
    });

    // Snapshot the read_dir count so we can measure how many extra scans
    // the watch registration below triggers.
    let prev_read_dir_count = fs.read_dir_call_count();

    let fake_server = fake_servers.next().await.unwrap();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the/stdlib/src/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("**/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so the assertions below don't depend on delivery order.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    // Registering the watchers alone must not produce any change events,
    // but it does trigger additional directory scans. NOTE(review): the
    // exact count of 4 is an implementation detail of the watcher setup —
    // confirm if it changes.
    cx.executor().run_until_parked();
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // The FS also watches the config dir and global gitignore; exclude
    // those to assert only on project-driven watches.
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| {
        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
    });
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
                ("target/x", true),
                ("target/y", true),
                ("target/y/out", true),
                ("target/y/out/y.rs", true),
                ("target/z", true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    // NOTE(review): this path says "/the-stdlib" but the tree created above
    // lives at "/the/stdlib" — presumably intended as a non-matching write;
    // confirm whether the hyphenated path is a typo.
    fs.save(
        path!("/the-stdlib/LICENSE").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
1512
1513#[gpui::test]
1514async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1515 init_test(cx);
1516
1517 let fs = FakeFs::new(cx.executor());
1518 fs.insert_tree(
1519 path!("/dir"),
1520 json!({
1521 "a.rs": "let a = 1;",
1522 "b.rs": "let b = 2;"
1523 }),
1524 )
1525 .await;
1526
1527 let project = Project::test(
1528 fs,
1529 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1530 cx,
1531 )
1532 .await;
1533 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1534
1535 let buffer_a = project
1536 .update(cx, |project, cx| {
1537 project.open_local_buffer(path!("/dir/a.rs"), cx)
1538 })
1539 .await
1540 .unwrap();
1541 let buffer_b = project
1542 .update(cx, |project, cx| {
1543 project.open_local_buffer(path!("/dir/b.rs"), cx)
1544 })
1545 .await
1546 .unwrap();
1547
1548 lsp_store.update(cx, |lsp_store, cx| {
1549 lsp_store
1550 .update_diagnostics(
1551 LanguageServerId(0),
1552 lsp::PublishDiagnosticsParams {
1553 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1554 version: None,
1555 diagnostics: vec![lsp::Diagnostic {
1556 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1557 severity: Some(lsp::DiagnosticSeverity::ERROR),
1558 message: "error 1".to_string(),
1559 ..Default::default()
1560 }],
1561 },
1562 None,
1563 DiagnosticSourceKind::Pushed,
1564 &[],
1565 cx,
1566 )
1567 .unwrap();
1568 lsp_store
1569 .update_diagnostics(
1570 LanguageServerId(0),
1571 lsp::PublishDiagnosticsParams {
1572 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1573 version: None,
1574 diagnostics: vec![lsp::Diagnostic {
1575 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1576 severity: Some(DiagnosticSeverity::WARNING),
1577 message: "error 2".to_string(),
1578 ..Default::default()
1579 }],
1580 },
1581 None,
1582 DiagnosticSourceKind::Pushed,
1583 &[],
1584 cx,
1585 )
1586 .unwrap();
1587 });
1588
1589 buffer_a.update(cx, |buffer, _| {
1590 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1591 assert_eq!(
1592 chunks
1593 .iter()
1594 .map(|(s, d)| (s.as_str(), *d))
1595 .collect::<Vec<_>>(),
1596 &[
1597 ("let ", None),
1598 ("a", Some(DiagnosticSeverity::ERROR)),
1599 (" = 1;", None),
1600 ]
1601 );
1602 });
1603 buffer_b.update(cx, |buffer, _| {
1604 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1605 assert_eq!(
1606 chunks
1607 .iter()
1608 .map(|(s, d)| (s.as_str(), *d))
1609 .collect::<Vec<_>>(),
1610 &[
1611 ("let ", None),
1612 ("b", Some(DiagnosticSeverity::WARNING)),
1613 (" = 2;", None),
1614 ]
1615 );
1616 });
1617}
1618
1619#[gpui::test]
1620async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1621 init_test(cx);
1622
1623 let fs = FakeFs::new(cx.executor());
1624 fs.insert_tree(
1625 path!("/root"),
1626 json!({
1627 "dir": {
1628 ".git": {
1629 "HEAD": "ref: refs/heads/main",
1630 },
1631 ".gitignore": "b.rs",
1632 "a.rs": "let a = 1;",
1633 "b.rs": "let b = 2;",
1634 },
1635 "other.rs": "let b = c;"
1636 }),
1637 )
1638 .await;
1639
1640 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1641 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1642 let (worktree, _) = project
1643 .update(cx, |project, cx| {
1644 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1645 })
1646 .await
1647 .unwrap();
1648 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1649
1650 let (worktree, _) = project
1651 .update(cx, |project, cx| {
1652 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1653 })
1654 .await
1655 .unwrap();
1656 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1657
1658 let server_id = LanguageServerId(0);
1659 lsp_store.update(cx, |lsp_store, cx| {
1660 lsp_store
1661 .update_diagnostics(
1662 server_id,
1663 lsp::PublishDiagnosticsParams {
1664 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1665 version: None,
1666 diagnostics: vec![lsp::Diagnostic {
1667 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1668 severity: Some(lsp::DiagnosticSeverity::ERROR),
1669 message: "unused variable 'b'".to_string(),
1670 ..Default::default()
1671 }],
1672 },
1673 None,
1674 DiagnosticSourceKind::Pushed,
1675 &[],
1676 cx,
1677 )
1678 .unwrap();
1679 lsp_store
1680 .update_diagnostics(
1681 server_id,
1682 lsp::PublishDiagnosticsParams {
1683 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1684 version: None,
1685 diagnostics: vec![lsp::Diagnostic {
1686 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1687 severity: Some(lsp::DiagnosticSeverity::ERROR),
1688 message: "unknown variable 'c'".to_string(),
1689 ..Default::default()
1690 }],
1691 },
1692 None,
1693 DiagnosticSourceKind::Pushed,
1694 &[],
1695 cx,
1696 )
1697 .unwrap();
1698 });
1699
1700 let main_ignored_buffer = project
1701 .update(cx, |project, cx| {
1702 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1703 })
1704 .await
1705 .unwrap();
1706 main_ignored_buffer.update(cx, |buffer, _| {
1707 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1708 assert_eq!(
1709 chunks
1710 .iter()
1711 .map(|(s, d)| (s.as_str(), *d))
1712 .collect::<Vec<_>>(),
1713 &[
1714 ("let ", None),
1715 ("b", Some(DiagnosticSeverity::ERROR)),
1716 (" = 2;", None),
1717 ],
1718 "Gigitnored buffers should still get in-buffer diagnostics",
1719 );
1720 });
1721 let other_buffer = project
1722 .update(cx, |project, cx| {
1723 project.open_buffer((other_worktree_id, rel_path("")), cx)
1724 })
1725 .await
1726 .unwrap();
1727 other_buffer.update(cx, |buffer, _| {
1728 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1729 assert_eq!(
1730 chunks
1731 .iter()
1732 .map(|(s, d)| (s.as_str(), *d))
1733 .collect::<Vec<_>>(),
1734 &[
1735 ("let b = ", None),
1736 ("c", Some(DiagnosticSeverity::ERROR)),
1737 (";", None),
1738 ],
1739 "Buffers from hidden projects should still get in-buffer diagnostics"
1740 );
1741 });
1742
1743 project.update(cx, |project, cx| {
1744 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1745 assert_eq!(
1746 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1747 vec![(
1748 ProjectPath {
1749 worktree_id: main_worktree_id,
1750 path: rel_path("b.rs").into(),
1751 },
1752 server_id,
1753 DiagnosticSummary {
1754 error_count: 1,
1755 warning_count: 0,
1756 }
1757 )]
1758 );
1759 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1760 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1761 });
1762}
1763
/// Exercises the disk-based diagnostics lifecycle: progress start/finish
/// events bracket the diagnostic updates, and publishing the same empty
/// diagnostics twice produces only one update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Starting progress under the disk-based token emits
    // DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish one error for a.rs while the disk-based work is running.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards shows the published diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second, identical empty publish must be a no-op: no new event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1900
/// Restarting a language server while its disk-based diagnostics task is
/// still in progress: once the new server's progress completes, the
/// project must no longer report any server as running disk-based
/// diagnostics, even though the old server never finished.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed and the new one (id 1) added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(events.next().await.unwrap(), Event::RefreshInlayHints);
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is reported as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2001
2002#[gpui::test]
2003async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2004 init_test(cx);
2005
2006 let fs = FakeFs::new(cx.executor());
2007 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2008
2009 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2010
2011 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2012 language_registry.add(rust_lang());
2013 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2014
2015 let (buffer, _) = project
2016 .update(cx, |project, cx| {
2017 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2018 })
2019 .await
2020 .unwrap();
2021
2022 // Publish diagnostics
2023 let fake_server = fake_servers.next().await.unwrap();
2024 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2025 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2026 version: None,
2027 diagnostics: vec![lsp::Diagnostic {
2028 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2029 severity: Some(lsp::DiagnosticSeverity::ERROR),
2030 message: "the message".to_string(),
2031 ..Default::default()
2032 }],
2033 });
2034
2035 cx.executor().run_until_parked();
2036 buffer.update(cx, |buffer, _| {
2037 assert_eq!(
2038 buffer
2039 .snapshot()
2040 .diagnostics_in_range::<_, usize>(0..1, false)
2041 .map(|entry| entry.diagnostic.message.clone())
2042 .collect::<Vec<_>>(),
2043 ["the message".to_string()]
2044 );
2045 });
2046 project.update(cx, |project, cx| {
2047 assert_eq!(
2048 project.diagnostic_summary(false, cx),
2049 DiagnosticSummary {
2050 error_count: 1,
2051 warning_count: 0,
2052 }
2053 );
2054 });
2055
2056 project.update(cx, |project, cx| {
2057 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2058 });
2059
2060 // The diagnostics are cleared.
2061 cx.executor().run_until_parked();
2062 buffer.update(cx, |buffer, _| {
2063 assert_eq!(
2064 buffer
2065 .snapshot()
2066 .diagnostics_in_range::<_, usize>(0..1, false)
2067 .map(|entry| entry.diagnostic.message.clone())
2068 .collect::<Vec<_>>(),
2069 Vec::<String>::new(),
2070 );
2071 });
2072 project.update(cx, |project, cx| {
2073 assert_eq!(
2074 project.diagnostic_summary(false, cx),
2075 DiagnosticSummary {
2076 error_count: 0,
2077 warning_count: 0,
2078 }
2079 );
2080 });
2081}
2082
2083#[gpui::test]
2084async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2085 init_test(cx);
2086
2087 let fs = FakeFs::new(cx.executor());
2088 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2089
2090 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2091 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2092
2093 language_registry.add(rust_lang());
2094 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2095
2096 let (buffer, _handle) = project
2097 .update(cx, |project, cx| {
2098 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2099 })
2100 .await
2101 .unwrap();
2102
2103 // Before restarting the server, report diagnostics with an unknown buffer version.
2104 let fake_server = fake_servers.next().await.unwrap();
2105 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2106 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2107 version: Some(10000),
2108 diagnostics: Vec::new(),
2109 });
2110 cx.executor().run_until_parked();
2111 project.update(cx, |project, cx| {
2112 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2113 });
2114
2115 let mut fake_server = fake_servers.next().await.unwrap();
2116 let notification = fake_server
2117 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2118 .await
2119 .text_document;
2120 assert_eq!(notification.version, 0);
2121}
2122
2123#[gpui::test]
2124async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2125 init_test(cx);
2126
2127 let progress_token = "the-progress-token";
2128
2129 let fs = FakeFs::new(cx.executor());
2130 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2131
2132 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2133
2134 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2135 language_registry.add(rust_lang());
2136 let mut fake_servers = language_registry.register_fake_lsp(
2137 "Rust",
2138 FakeLspAdapter {
2139 name: "the-language-server",
2140 disk_based_diagnostics_sources: vec!["disk".into()],
2141 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2142 ..Default::default()
2143 },
2144 );
2145
2146 let (buffer, _handle) = project
2147 .update(cx, |project, cx| {
2148 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2149 })
2150 .await
2151 .unwrap();
2152
2153 // Simulate diagnostics starting to update.
2154 let mut fake_server = fake_servers.next().await.unwrap();
2155 fake_server
2156 .start_progress_with(
2157 "another-token",
2158 lsp::WorkDoneProgressBegin {
2159 cancellable: Some(false),
2160 ..Default::default()
2161 },
2162 )
2163 .await;
2164 fake_server
2165 .start_progress_with(
2166 progress_token,
2167 lsp::WorkDoneProgressBegin {
2168 cancellable: Some(true),
2169 ..Default::default()
2170 },
2171 )
2172 .await;
2173 cx.executor().run_until_parked();
2174
2175 project.update(cx, |project, cx| {
2176 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2177 });
2178
2179 let cancel_notification = fake_server
2180 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2181 .await;
2182 assert_eq!(
2183 cancel_notification.token,
2184 NumberOrString::String(progress_token.into())
2185 );
2186}
2187
/// Toggling `enable_language_server` per language in user settings
/// stops and restarts only the affected servers: disabling Rust exits
/// the Rust server; re-enabling Rust while disabling JavaScript starts
/// a fresh Rust server and exits the JavaScript one.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The fresh Rust server re-opens the still-open Rust buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2305
// Checks that diagnostics published by a language server are interpreted
// against the buffer version they were computed for, and are therefore
// transformed through any edits the user made before or after publication.
// Also covers overlapping diagnostics and diagnostics arriving out of order.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Marks diagnostics with source "disk" as disk-based below.
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Rows are shifted by the two inserted newlines: the server reported
        // rows 0..=2, but the diagnostics now appear on rows 2..=4.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        // Diagnostic highlighting is reflected in the buffer's chunked text.
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Within the overlap, the narrower ERROR wins over the WARNING.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The ranges account for the edits above: 'A' moved to column 21 by
        // the indent + signature change, 'BB' widened to cover inserted "xxx".
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2597
2598#[gpui::test]
2599async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2600 init_test(cx);
2601
2602 let text = concat!(
2603 "let one = ;\n", //
2604 "let two = \n",
2605 "let three = 3;\n",
2606 );
2607
2608 let fs = FakeFs::new(cx.executor());
2609 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2610
2611 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2612 let buffer = project
2613 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2614 .await
2615 .unwrap();
2616
2617 project.update(cx, |project, cx| {
2618 project.lsp_store.update(cx, |lsp_store, cx| {
2619 lsp_store
2620 .update_diagnostic_entries(
2621 LanguageServerId(0),
2622 PathBuf::from("/dir/a.rs"),
2623 None,
2624 None,
2625 vec![
2626 DiagnosticEntry {
2627 range: Unclipped(PointUtf16::new(0, 10))
2628 ..Unclipped(PointUtf16::new(0, 10)),
2629 diagnostic: Diagnostic {
2630 severity: DiagnosticSeverity::ERROR,
2631 message: "syntax error 1".to_string(),
2632 source_kind: DiagnosticSourceKind::Pushed,
2633 ..Diagnostic::default()
2634 },
2635 },
2636 DiagnosticEntry {
2637 range: Unclipped(PointUtf16::new(1, 10))
2638 ..Unclipped(PointUtf16::new(1, 10)),
2639 diagnostic: Diagnostic {
2640 severity: DiagnosticSeverity::ERROR,
2641 message: "syntax error 2".to_string(),
2642 source_kind: DiagnosticSourceKind::Pushed,
2643 ..Diagnostic::default()
2644 },
2645 },
2646 ],
2647 cx,
2648 )
2649 .unwrap();
2650 })
2651 });
2652
2653 // An empty range is extended forward to include the following character.
2654 // At the end of a line, an empty range is extended backward to include
2655 // the preceding character.
2656 buffer.update(cx, |buffer, _| {
2657 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2658 assert_eq!(
2659 chunks
2660 .iter()
2661 .map(|(s, d)| (s.as_str(), *d))
2662 .collect::<Vec<_>>(),
2663 &[
2664 ("let one = ", None),
2665 (";", Some(DiagnosticSeverity::ERROR)),
2666 ("\nlet two =", None),
2667 (" ", Some(DiagnosticSeverity::ERROR)),
2668 ("\nlet three = 3;\n", None)
2669 ]
2670 );
2671 });
2672}
2673
2674#[gpui::test]
2675async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2676 init_test(cx);
2677
2678 let fs = FakeFs::new(cx.executor());
2679 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2680 .await;
2681
2682 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2683 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2684
2685 lsp_store.update(cx, |lsp_store, cx| {
2686 lsp_store
2687 .update_diagnostic_entries(
2688 LanguageServerId(0),
2689 Path::new("/dir/a.rs").to_owned(),
2690 None,
2691 None,
2692 vec![DiagnosticEntry {
2693 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2694 diagnostic: Diagnostic {
2695 severity: DiagnosticSeverity::ERROR,
2696 is_primary: true,
2697 message: "syntax error a1".to_string(),
2698 source_kind: DiagnosticSourceKind::Pushed,
2699 ..Diagnostic::default()
2700 },
2701 }],
2702 cx,
2703 )
2704 .unwrap();
2705 lsp_store
2706 .update_diagnostic_entries(
2707 LanguageServerId(1),
2708 Path::new("/dir/a.rs").to_owned(),
2709 None,
2710 None,
2711 vec![DiagnosticEntry {
2712 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2713 diagnostic: Diagnostic {
2714 severity: DiagnosticSeverity::ERROR,
2715 is_primary: true,
2716 message: "syntax error b1".to_string(),
2717 source_kind: DiagnosticSourceKind::Pushed,
2718 ..Diagnostic::default()
2719 },
2720 }],
2721 cx,
2722 )
2723 .unwrap();
2724
2725 assert_eq!(
2726 lsp_store.diagnostic_summary(false, cx),
2727 DiagnosticSummary {
2728 error_count: 2,
2729 warning_count: 0,
2730 }
2731 );
2732 });
2733}
2734
// Checks that `edits_from_lsp` interprets edits against the (stale) document
// version the server last saw, transforming them through edits the user made
// in the meantime so they apply cleanly to the current buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version number the server associates with the open buffer;
    // the LSP edits below will be expressed against this version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // Positions in these edits refer to the buffer as it was at
    // `lsp_document_version`, before the user edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits preserves both the user's interleaved
    // edits and the server's intended changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2889
// Checks that `edits_from_lsp` collapses a sprawling whole-file rewrite into
// the minimal set of edits, as asserted on the returned ranges below.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The four LSP edits above boil down to two minimal buffer edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3000
3001#[gpui::test]
3002async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
3003 cx: &mut gpui::TestAppContext,
3004) {
3005 init_test(cx);
3006
3007 let text = "Path()";
3008
3009 let fs = FakeFs::new(cx.executor());
3010 fs.insert_tree(
3011 path!("/dir"),
3012 json!({
3013 "a.rs": text
3014 }),
3015 )
3016 .await;
3017
3018 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3019 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
3020 let buffer = project
3021 .update(cx, |project, cx| {
3022 project.open_local_buffer(path!("/dir/a.rs"), cx)
3023 })
3024 .await
3025 .unwrap();
3026
3027 // Simulate the language server sending us a pair of edits at the same location,
3028 // with an insertion following a replacement (which violates the LSP spec).
3029 let edits = lsp_store
3030 .update(cx, |lsp_store, cx| {
3031 lsp_store.as_local_mut().unwrap().edits_from_lsp(
3032 &buffer,
3033 [
3034 lsp::TextEdit {
3035 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
3036 new_text: "Path".into(),
3037 },
3038 lsp::TextEdit {
3039 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
3040 new_text: "from path import Path\n\n\n".into(),
3041 },
3042 ],
3043 LanguageServerId(0),
3044 None,
3045 cx,
3046 )
3047 })
3048 .await
3049 .unwrap();
3050
3051 buffer.update(cx, |buffer, cx| {
3052 buffer.edit(edits, None, cx);
3053 assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
3054 });
3055}
3056
// Checks that `edits_from_lsp` normalizes malformed server edits: unordered
// edits, an inverted range, and a range extending past the end of the file
// all resolve to the same minimal, well-formed pair of buffer edits.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end (0, 4) precedes start (0, 8).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Row 99 is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // The malformed edits resolve to two minimal, ordered buffer edits.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3163
3164fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3165 buffer: &Buffer,
3166 range: Range<T>,
3167) -> Vec<(String, Option<DiagnosticSeverity>)> {
3168 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3169 for chunk in buffer.snapshot().chunks(range, true) {
3170 if chunks
3171 .last()
3172 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3173 {
3174 chunks.last_mut().unwrap().0.push_str(chunk.text);
3175 } else {
3176 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3177 }
3178 }
3179 chunks
3180}
3181
// Checks go-to-definition into a file outside the project: the target file is
// loaded into an invisible worktree that is released once the definition
// result is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside it.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // The fake server answers any definition request with a location in a.rs.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs was added as an invisible worktree to host the target buffer.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path along with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3280
// Verifies that when a completion item carries a `text_edit`, its range and
// new text take precedence over the item's `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Kick off the completion request before installing the handler; the
    // handler then serves it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    // Replaces the final "fqn" (last 3 characters) of the line.
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion reflects the text_edit, not insert_text/label.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3363
// Covers LSP 3.17 `CompletionList.itemDefaults.editRange`: items that omit a
// per-item `text_edit` must fall back to the list-level default edit range,
// using `insert_text` when present and the `label` otherwise.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request first; the handler installed below
        // serves it, and `.next().await` waits until it has done so once.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the last three characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` wins over `label`, and the default range is applied.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no `insert_text` either, the `label` is used as the new text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3499
// Covers completion items with neither a `text_edit` nor a list-level default
// edit range: the replace range must be inferred from the text surrounding the
// cursor, with `insert_text` (or, failing that, `label`) supplying the new text.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // The inferred range covers the last three characters ("fqn") before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor is placed inside the string literal, before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // The inferred range covers "cmp" and excludes the closing quote.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3605
// Carriage returns (both bare `\r` and `\r\n`) in a completion item's
// `insert_text` must be normalized to `\n` in the resulting completion.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Mixes a bare `\r` and a `\r\n` to exercise both forms.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3673
// End-to-end flow for a code action that carries a command instead of edits:
// resolving the action populates its command, executing that command makes the
// server send `workspace/applyEdit` back to the client, and the applied edits
// are returned in the project transaction (and are undoable).
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                // The server advertises lazy resolution plus command execution.
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // `data` marks the action as needing resolution below.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action").
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated edit: insert "X" at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The server-initiated edit is part of the buffer's undo history.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3815
// Renaming an entry to a path whose parent directories don't exist yet must
// create the whole hierarchy and preserve the file's contents; moving into an
// already-existing directory afterwards must also succeed.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move the file into a three-level-deep directory that doesn't exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look the entry up again: renames may assign it a fresh entry id.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Now move it up one level, into a directory that already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3923
3924#[gpui::test(iterations = 10)]
3925async fn test_save_file(cx: &mut gpui::TestAppContext) {
3926 init_test(cx);
3927
3928 let fs = FakeFs::new(cx.executor());
3929 fs.insert_tree(
3930 path!("/dir"),
3931 json!({
3932 "file1": "the old contents",
3933 }),
3934 )
3935 .await;
3936
3937 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3938 let buffer = project
3939 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3940 .await
3941 .unwrap();
3942 buffer.update(cx, |buffer, cx| {
3943 assert_eq!(buffer.text(), "the old contents");
3944 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3945 });
3946
3947 project
3948 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3949 .await
3950 .unwrap();
3951
3952 let new_text = fs
3953 .load(Path::new(path!("/dir/file1")))
3954 .await
3955 .unwrap()
3956 .replace("\r\n", "\n");
3957 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3958}
3959
// Regression test for issue #24349: saving an untitled buffer under a name
// that matches a registered language must start that language's server and
// notify it that the newly-saved file is open.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer has no language yet, so registering it with language
    // servers is a no-op: no server should be running at this point.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` name gives the buffer the Rust language.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4039
4040#[gpui::test(iterations = 30)]
4041async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4042 init_test(cx);
4043
4044 let fs = FakeFs::new(cx.executor());
4045 fs.insert_tree(
4046 path!("/dir"),
4047 json!({
4048 "file1": "the original contents",
4049 }),
4050 )
4051 .await;
4052
4053 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4054 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4055 let buffer = project
4056 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4057 .await
4058 .unwrap();
4059
4060 // Simulate buffer diffs being slow, so that they don't complete before
4061 // the next file change occurs.
4062 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4063
4064 // Change the buffer's file on disk, and then wait for the file change
4065 // to be detected by the worktree, so that the buffer starts reloading.
4066 fs.save(
4067 path!("/dir/file1").as_ref(),
4068 &"the first contents".into(),
4069 Default::default(),
4070 )
4071 .await
4072 .unwrap();
4073 worktree.next_event(cx).await;
4074
4075 // Change the buffer's file again. Depending on the random seed, the
4076 // previous file change may still be in progress.
4077 fs.save(
4078 path!("/dir/file1").as_ref(),
4079 &"the second contents".into(),
4080 Default::default(),
4081 )
4082 .await
4083 .unwrap();
4084 worktree.next_event(cx).await;
4085
4086 cx.executor().run_until_parked();
4087 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4088 buffer.read_with(cx, |buffer, _| {
4089 assert_eq!(buffer.text(), on_disk_text);
4090 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4091 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4092 });
4093}
4094
4095#[gpui::test(iterations = 30)]
4096async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4097 init_test(cx);
4098
4099 let fs = FakeFs::new(cx.executor());
4100 fs.insert_tree(
4101 path!("/dir"),
4102 json!({
4103 "file1": "the original contents",
4104 }),
4105 )
4106 .await;
4107
4108 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4109 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4110 let buffer = project
4111 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4112 .await
4113 .unwrap();
4114
4115 // Simulate buffer diffs being slow, so that they don't complete before
4116 // the next file change occurs.
4117 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4118
4119 // Change the buffer's file on disk, and then wait for the file change
4120 // to be detected by the worktree, so that the buffer starts reloading.
4121 fs.save(
4122 path!("/dir/file1").as_ref(),
4123 &"the first contents".into(),
4124 Default::default(),
4125 )
4126 .await
4127 .unwrap();
4128 worktree.next_event(cx).await;
4129
4130 cx.executor()
4131 .spawn(cx.executor().simulate_random_delay())
4132 .await;
4133
4134 // Perform a noop edit, causing the buffer's version to increase.
4135 buffer.update(cx, |buffer, cx| {
4136 buffer.edit([(0..0, " ")], None, cx);
4137 buffer.undo(cx);
4138 });
4139
4140 cx.executor().run_until_parked();
4141 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4142 buffer.read_with(cx, |buffer, _| {
4143 let buffer_text = buffer.text();
4144 if buffer_text == on_disk_text {
4145 assert!(
4146 !buffer.is_dirty() && !buffer.has_conflict(),
4147 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4148 );
4149 }
4150 // If the file change occurred while the buffer was processing the first
4151 // change, the buffer will be in a conflicting state.
4152 else {
4153 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4154 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4155 }
4156 });
4157}
4158
4159#[gpui::test]
4160async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4161 init_test(cx);
4162
4163 let fs = FakeFs::new(cx.executor());
4164 fs.insert_tree(
4165 path!("/dir"),
4166 json!({
4167 "file1": "the old contents",
4168 }),
4169 )
4170 .await;
4171
4172 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4173 let buffer = project
4174 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4175 .await
4176 .unwrap();
4177 buffer.update(cx, |buffer, cx| {
4178 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4179 });
4180
4181 project
4182 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4183 .await
4184 .unwrap();
4185
4186 let new_text = fs
4187 .load(Path::new(path!("/dir/file1")))
4188 .await
4189 .unwrap()
4190 .replace("\r\n", "\n");
4191 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4192}
4193
// Saving an untitled buffer under a new path must write it to disk, re-detect
// its language from the new file name, clear the dirty flag, and register the
// buffer so a later open of that path returns the same entity.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // A freshly-created, unsaved buffer starts out as Plain Text and is dirty
    // once edited.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The `.rs` extension causes the language to be re-detected as Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Re-opening the saved path must yield the very same buffer entity.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4247
// Uses a real filesystem to exercise worktree rescanning after renames and
// deletions, verifies that entry ids and open buffers track their files across
// those moves, and that a remote replica of the worktree converges to the same
// state when fed the observed update stream.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-fs test: allow blocking operations on the test threads.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            1,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames, including renames of ancestor directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while a deleted file keeps its last-known path.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4415
// Renaming a directory must preserve the entry ids of the directory and the
// files inside it, and must not dirty buffers open on those files.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the containing directory "a" -> "b".
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Both the directory and the file keep their original entry ids.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4469
4470#[gpui::test]
4471async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4472 init_test(cx);
4473
4474 let fs = FakeFs::new(cx.executor());
4475 fs.insert_tree(
4476 "/dir",
4477 json!({
4478 "a.txt": "a-contents",
4479 "b.txt": "b-contents",
4480 }),
4481 )
4482 .await;
4483
4484 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4485
4486 // Spawn multiple tasks to open paths, repeating some paths.
4487 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4488 (
4489 p.open_local_buffer("/dir/a.txt", cx),
4490 p.open_local_buffer("/dir/b.txt", cx),
4491 p.open_local_buffer("/dir/a.txt", cx),
4492 )
4493 });
4494
4495 let buffer_a_1 = buffer_a_1.await.unwrap();
4496 let buffer_a_2 = buffer_a_2.await.unwrap();
4497 let buffer_b = buffer_b.await.unwrap();
4498 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4499 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4500
4501 // There is only one buffer per path.
4502 let buffer_a_id = buffer_a_1.entity_id();
4503 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4504
4505 // Open the same path again while it is still open.
4506 drop(buffer_a_1);
4507 let buffer_a_3 = project
4508 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4509 .await
4510 .unwrap();
4511
4512 // There's still only one buffer per path.
4513 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4514}
4515
// Verifies dirty-state tracking and event emission as buffers are edited,
// saved, deleted on disk, and restored to their previously saved contents.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Accumulates every non-`Operation` event emitted by `buffer1`, so each
    // phase below can assert on exactly the events it triggered.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the buffer's current version at
        // the file's current on-disk mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    // Note: only the first of the two consecutive edits flips the dirty bit,
    // so a single DirtyChanged is expected between the two Edited events.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    // Dirty the buffer before deleting its file, then discard the events
    // produced by the edit itself.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4697
// Verifies how on-disk changes are merged into an open buffer: a clean
// buffer is reloaded (with anchors repositioned across the diff), while a
// dirty buffer keeps its contents and is flagged as conflicted.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The `ˇ` markers pick out offsets whose anchors we track across reloads.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // The freshly opened buffer starts out clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk, modifying words around the tracked anchors.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors land at the offsets marked in the new text.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4780
4781#[gpui::test]
4782async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4783 init_test(cx);
4784
4785 let fs = FakeFs::new(cx.executor());
4786 fs.insert_tree(
4787 path!("/dir"),
4788 json!({
4789 "file1": "a\nb\nc\n",
4790 "file2": "one\r\ntwo\r\nthree\r\n",
4791 }),
4792 )
4793 .await;
4794
4795 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4796 let buffer1 = project
4797 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4798 .await
4799 .unwrap();
4800 let buffer2 = project
4801 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4802 .await
4803 .unwrap();
4804
4805 buffer1.update(cx, |buffer, _| {
4806 assert_eq!(buffer.text(), "a\nb\nc\n");
4807 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4808 });
4809 buffer2.update(cx, |buffer, _| {
4810 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4811 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4812 });
4813
4814 // Change a file's line endings on disk from unix to windows. The buffer's
4815 // state updates correctly.
4816 fs.save(
4817 path!("/dir/file1").as_ref(),
4818 &"aaa\nb\nc\n".into(),
4819 LineEnding::Windows,
4820 )
4821 .await
4822 .unwrap();
4823 cx.executor().run_until_parked();
4824 buffer1.update(cx, |buffer, _| {
4825 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4826 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4827 });
4828
4829 // Save a file with windows line endings. The file is written correctly.
4830 buffer2.update(cx, |buffer, cx| {
4831 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4832 });
4833 project
4834 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4835 .await
4836 .unwrap();
4837 assert_eq!(
4838 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4839 "one\r\ntwo\r\nthree\r\nfour\r\n",
4840 );
4841}
4842
// Verifies that pushed LSP diagnostics are grouped via their
// `related_information` links: a primary diagnostic and the hint
// diagnostics that point back at it share a group id, and
// `diagnostic_group` returns each group's entries.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Synthetic publishDiagnostics payload: a warning ("error 1") with one
    // hint, and an error ("error 2") with two hints. Each hint diagnostic's
    // related information points back at its primary ("original diagnostic").
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics, ordered by buffer position. Two groups emerge:
    // group 0 is "error 2" (primary) plus its two hints; group 1 is
    // "error 1" (primary) plus its single hint.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: the "error 2" primary and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: the "error 1" primary and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5102
// Verifies that renaming a worktree entry sends the LSP file-operation
// requests/notifications to a server that registered for them:
// `workspace/willRenameFiles` (whose returned WorkspaceEdit is applied)
// followed by a `workspace/didRenameFiles` notification.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register for rename notifications on `*.rs` files and on all folders.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; the willRename/didRename handlers below are
    // serviced while this future is pending.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; it targets the
    // *other* file (two/two.rs).
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once the willRenameFiles handler actually runs, so the final
    // assertion proves the request was made.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server is notified via didRenameFiles.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5238
// Verifies symbol renaming against a fake LSP server: `prepare_rename`
// resolves the symbol's range, and `perform_rename` applies the server's
// WorkspaceEdit across multiple buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server answers
    // with the symbol's range.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server returns edits in both files.
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The transaction maps each edited buffer to its applied edits; both
    // files should now contain the renamed symbol.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5378
// Verifies project-wide text search: results are keyed by worktree-relative
// path with byte ranges of each match, and unsaved buffer edits are
// reflected in subsequent searches.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Only whole-word occurrences of "TWO" match (not the "TWO" in "two::TWO"
    // prefix positions of other identifiers).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit a buffer without saving; the search should see the in-memory
    // contents, not what's on disk.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5455
// Verifies the "files to include" PathMatcher of SearchQuery::text (the
// fifth argument): only files matching at least one inclusion glob are
// searched; non-matching globs are simply inert.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5579
// Verifies the "files to exclude" PathMatcher of SearchQuery::text (the
// sixth argument): files matching any exclusion glob are skipped;
// non-matching globs have no effect.
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5703
5704#[gpui::test]
5705async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5706 init_test(cx);
5707
5708 let search_query = "file";
5709
5710 let fs = FakeFs::new(cx.executor());
5711 fs.insert_tree(
5712 path!("/dir"),
5713 json!({
5714 "one.rs": r#"// Rust file one"#,
5715 "one.ts": r#"// TypeScript file one"#,
5716 "two.rs": r#"// Rust file two"#,
5717 "two.ts": r#"// TypeScript file two"#,
5718 }),
5719 )
5720 .await;
5721
5722 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5723 let path_style = PathStyle::local();
5724 let _buffer = project.update(cx, |project, cx| {
5725 project.create_local_buffer("file", None, false, cx)
5726 });
5727
5728 assert_eq!(
5729 search(
5730 &project,
5731 SearchQuery::text(
5732 search_query,
5733 false,
5734 true,
5735 false,
5736 Default::default(),
5737 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5738 false,
5739 None,
5740 )
5741 .unwrap(),
5742 cx
5743 )
5744 .await
5745 .unwrap(),
5746 HashMap::from_iter([
5747 (path!("dir/one.rs").to_string(), vec![8..12]),
5748 (path!("dir/one.ts").to_string(), vec![14..18]),
5749 (path!("dir/two.rs").to_string(), vec![8..12]),
5750 (path!("dir/two.ts").to_string(), vec![14..18]),
5751 ]),
5752 "If no exclusions match, all files should be returned"
5753 );
5754
5755 assert_eq!(
5756 search(
5757 &project,
5758 SearchQuery::text(
5759 search_query,
5760 false,
5761 true,
5762 false,
5763 Default::default(),
5764 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5765 false,
5766 None,
5767 )
5768 .unwrap(),
5769 cx
5770 )
5771 .await
5772 .unwrap(),
5773 HashMap::from_iter([
5774 (path!("dir/one.ts").to_string(), vec![14..18]),
5775 (path!("dir/two.ts").to_string(), vec![14..18]),
5776 ]),
5777 "Rust exclusion search should give only TypeScript files"
5778 );
5779
5780 assert_eq!(
5781 search(
5782 &project,
5783 SearchQuery::text(
5784 search_query,
5785 false,
5786 true,
5787 false,
5788 Default::default(),
5789 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5790 false,
5791 None,
5792 )
5793 .unwrap(),
5794 cx
5795 )
5796 .await
5797 .unwrap(),
5798 HashMap::from_iter([
5799 (path!("dir/one.rs").to_string(), vec![8..12]),
5800 (path!("dir/two.rs").to_string(), vec![8..12]),
5801 ]),
5802 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5803 );
5804
5805 assert!(
5806 search(
5807 &project,
5808 SearchQuery::text(
5809 search_query,
5810 false,
5811 true,
5812 false,
5813 Default::default(),
5814 PathMatcher::new(
5815 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5816 PathStyle::local(),
5817 )
5818 .unwrap(),
5819 false,
5820 None,
5821 )
5822 .unwrap(),
5823 cx
5824 )
5825 .await
5826 .unwrap()
5827 .is_empty(),
5828 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5829 );
5830}
5831
5832#[gpui::test]
5833async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5834 init_test(cx);
5835
5836 let search_query = "file";
5837
5838 let fs = FakeFs::new(cx.executor());
5839 fs.insert_tree(
5840 path!("/dir"),
5841 json!({
5842 "one.rs": r#"// Rust file one"#,
5843 "one.ts": r#"// TypeScript file one"#,
5844 "two.rs": r#"// Rust file two"#,
5845 "two.ts": r#"// TypeScript file two"#,
5846 }),
5847 )
5848 .await;
5849 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5850 assert!(
5851 search(
5852 &project,
5853 SearchQuery::text(
5854 search_query,
5855 false,
5856 true,
5857 false,
5858 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5859 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5860 false,
5861 None,
5862 )
5863 .unwrap(),
5864 cx
5865 )
5866 .await
5867 .unwrap()
5868 .is_empty(),
5869 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5870 );
5871
5872 assert!(
5873 search(
5874 &project,
5875 SearchQuery::text(
5876 search_query,
5877 false,
5878 true,
5879 false,
5880 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5881 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5882 false,
5883 None,
5884 )
5885 .unwrap(),
5886 cx
5887 )
5888 .await
5889 .unwrap()
5890 .is_empty(),
5891 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5892 );
5893
5894 assert!(
5895 search(
5896 &project,
5897 SearchQuery::text(
5898 search_query,
5899 false,
5900 true,
5901 false,
5902 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5903 .unwrap(),
5904 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5905 .unwrap(),
5906 false,
5907 None,
5908 )
5909 .unwrap(),
5910 cx
5911 )
5912 .await
5913 .unwrap()
5914 .is_empty(),
5915 "Non-matching inclusions and exclusions should not change that."
5916 );
5917
5918 assert_eq!(
5919 search(
5920 &project,
5921 SearchQuery::text(
5922 search_query,
5923 false,
5924 true,
5925 false,
5926 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5927 .unwrap(),
5928 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
5929 .unwrap(),
5930 false,
5931 None,
5932 )
5933 .unwrap(),
5934 cx
5935 )
5936 .await
5937 .unwrap(),
5938 HashMap::from_iter([
5939 (path!("dir/one.ts").to_string(), vec![14..18]),
5940 (path!("dir/two.ts").to_string(), vec![14..18]),
5941 ]),
5942 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
5943 );
5944}
5945
5946#[gpui::test]
5947async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
5948 init_test(cx);
5949
5950 let fs = FakeFs::new(cx.executor());
5951 fs.insert_tree(
5952 path!("/worktree-a"),
5953 json!({
5954 "haystack.rs": r#"// NEEDLE"#,
5955 "haystack.ts": r#"// NEEDLE"#,
5956 }),
5957 )
5958 .await;
5959 fs.insert_tree(
5960 path!("/worktree-b"),
5961 json!({
5962 "haystack.rs": r#"// NEEDLE"#,
5963 "haystack.ts": r#"// NEEDLE"#,
5964 }),
5965 )
5966 .await;
5967
5968 let path_style = PathStyle::local();
5969 let project = Project::test(
5970 fs.clone(),
5971 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
5972 cx,
5973 )
5974 .await;
5975
5976 assert_eq!(
5977 search(
5978 &project,
5979 SearchQuery::text(
5980 "NEEDLE",
5981 false,
5982 true,
5983 false,
5984 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
5985 Default::default(),
5986 true,
5987 None,
5988 )
5989 .unwrap(),
5990 cx
5991 )
5992 .await
5993 .unwrap(),
5994 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
5995 "should only return results from included worktree"
5996 );
5997 assert_eq!(
5998 search(
5999 &project,
6000 SearchQuery::text(
6001 "NEEDLE",
6002 false,
6003 true,
6004 false,
6005 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6006 Default::default(),
6007 true,
6008 None,
6009 )
6010 .unwrap(),
6011 cx
6012 )
6013 .await
6014 .unwrap(),
6015 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6016 "should only return results from included worktree"
6017 );
6018
6019 assert_eq!(
6020 search(
6021 &project,
6022 SearchQuery::text(
6023 "NEEDLE",
6024 false,
6025 true,
6026 false,
6027 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6028 Default::default(),
6029 false,
6030 None,
6031 )
6032 .unwrap(),
6033 cx
6034 )
6035 .await
6036 .unwrap(),
6037 HashMap::from_iter([
6038 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6039 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6040 ]),
6041 "should return results from both worktrees"
6042 );
6043}
6044
6045#[gpui::test]
6046async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6047 init_test(cx);
6048
6049 let fs = FakeFs::new(cx.background_executor.clone());
6050 fs.insert_tree(
6051 path!("/dir"),
6052 json!({
6053 ".git": {},
6054 ".gitignore": "**/target\n/node_modules\n",
6055 "target": {
6056 "index.txt": "index_key:index_value"
6057 },
6058 "node_modules": {
6059 "eslint": {
6060 "index.ts": "const eslint_key = 'eslint value'",
6061 "package.json": r#"{ "some_key": "some value" }"#,
6062 },
6063 "prettier": {
6064 "index.ts": "const prettier_key = 'prettier value'",
6065 "package.json": r#"{ "other_key": "other value" }"#,
6066 },
6067 },
6068 "package.json": r#"{ "main_key": "main value" }"#,
6069 }),
6070 )
6071 .await;
6072 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6073
6074 let query = "key";
6075 assert_eq!(
6076 search(
6077 &project,
6078 SearchQuery::text(
6079 query,
6080 false,
6081 false,
6082 false,
6083 Default::default(),
6084 Default::default(),
6085 false,
6086 None,
6087 )
6088 .unwrap(),
6089 cx
6090 )
6091 .await
6092 .unwrap(),
6093 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6094 "Only one non-ignored file should have the query"
6095 );
6096
6097 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6098 let path_style = PathStyle::local();
6099 assert_eq!(
6100 search(
6101 &project,
6102 SearchQuery::text(
6103 query,
6104 false,
6105 false,
6106 true,
6107 Default::default(),
6108 Default::default(),
6109 false,
6110 None,
6111 )
6112 .unwrap(),
6113 cx
6114 )
6115 .await
6116 .unwrap(),
6117 HashMap::from_iter([
6118 (path!("dir/package.json").to_string(), vec![8..11]),
6119 (path!("dir/target/index.txt").to_string(), vec![6..9]),
6120 (
6121 path!("dir/node_modules/prettier/package.json").to_string(),
6122 vec![9..12]
6123 ),
6124 (
6125 path!("dir/node_modules/prettier/index.ts").to_string(),
6126 vec![15..18]
6127 ),
6128 (
6129 path!("dir/node_modules/eslint/index.ts").to_string(),
6130 vec![13..16]
6131 ),
6132 (
6133 path!("dir/node_modules/eslint/package.json").to_string(),
6134 vec![8..11]
6135 ),
6136 ]),
6137 "Unrestricted search with ignored directories should find every file with the query"
6138 );
6139
6140 let files_to_include =
6141 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
6142 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
6143 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6144 assert_eq!(
6145 search(
6146 &project,
6147 SearchQuery::text(
6148 query,
6149 false,
6150 false,
6151 true,
6152 files_to_include,
6153 files_to_exclude,
6154 false,
6155 None,
6156 )
6157 .unwrap(),
6158 cx
6159 )
6160 .await
6161 .unwrap(),
6162 HashMap::from_iter([(
6163 path!("dir/node_modules/prettier/package.json").to_string(),
6164 vec![9..12]
6165 )]),
6166 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6167 );
6168}
6169
6170#[gpui::test]
6171async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
6172 init_test(cx);
6173
6174 let fs = FakeFs::new(cx.executor());
6175 fs.insert_tree(
6176 path!("/dir"),
6177 json!({
6178 "one.rs": "// ПРИВЕТ? привет!",
6179 "two.rs": "// ПРИВЕТ.",
6180 "three.rs": "// привет",
6181 }),
6182 )
6183 .await;
6184 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6185 let unicode_case_sensitive_query = SearchQuery::text(
6186 "привет",
6187 false,
6188 true,
6189 false,
6190 Default::default(),
6191 Default::default(),
6192 false,
6193 None,
6194 );
6195 assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
6196 assert_eq!(
6197 search(&project, unicode_case_sensitive_query.unwrap(), cx)
6198 .await
6199 .unwrap(),
6200 HashMap::from_iter([
6201 (path!("dir/one.rs").to_string(), vec![17..29]),
6202 (path!("dir/three.rs").to_string(), vec![3..15]),
6203 ])
6204 );
6205
6206 let unicode_case_insensitive_query = SearchQuery::text(
6207 "привет",
6208 false,
6209 false,
6210 false,
6211 Default::default(),
6212 Default::default(),
6213 false,
6214 None,
6215 );
6216 assert_matches!(
6217 unicode_case_insensitive_query,
6218 Ok(SearchQuery::Regex { .. })
6219 );
6220 assert_eq!(
6221 search(&project, unicode_case_insensitive_query.unwrap(), cx)
6222 .await
6223 .unwrap(),
6224 HashMap::from_iter([
6225 (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
6226 (path!("dir/two.rs").to_string(), vec![3..15]),
6227 (path!("dir/three.rs").to_string(), vec![3..15]),
6228 ])
6229 );
6230
6231 assert_eq!(
6232 search(
6233 &project,
6234 SearchQuery::text(
6235 "привет.",
6236 false,
6237 false,
6238 false,
6239 Default::default(),
6240 Default::default(),
6241 false,
6242 None,
6243 )
6244 .unwrap(),
6245 cx
6246 )
6247 .await
6248 .unwrap(),
6249 HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
6250 );
6251}
6252
6253#[gpui::test]
6254async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6255 init_test(cx);
6256
6257 let fs = FakeFs::new(cx.executor());
6258 fs.insert_tree(
6259 "/one/two",
6260 json!({
6261 "three": {
6262 "a.txt": "",
6263 "four": {}
6264 },
6265 "c.rs": ""
6266 }),
6267 )
6268 .await;
6269
6270 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6271 project
6272 .update(cx, |project, cx| {
6273 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6274 project.create_entry((id, rel_path("b..")), true, cx)
6275 })
6276 .await
6277 .unwrap()
6278 .into_included()
6279 .unwrap();
6280
6281 assert_eq!(
6282 fs.paths(true),
6283 vec![
6284 PathBuf::from(path!("/")),
6285 PathBuf::from(path!("/one")),
6286 PathBuf::from(path!("/one/two")),
6287 PathBuf::from(path!("/one/two/c.rs")),
6288 PathBuf::from(path!("/one/two/three")),
6289 PathBuf::from(path!("/one/two/three/a.txt")),
6290 PathBuf::from(path!("/one/two/three/b..")),
6291 PathBuf::from(path!("/one/two/three/four")),
6292 ]
6293 );
6294}
6295
6296#[gpui::test]
6297async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6298 init_test(cx);
6299
6300 let fs = FakeFs::new(cx.executor());
6301 fs.insert_tree(
6302 path!("/dir"),
6303 json!({
6304 "a.tsx": "a",
6305 }),
6306 )
6307 .await;
6308
6309 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6310
6311 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6312 language_registry.add(tsx_lang());
6313 let language_server_names = [
6314 "TypeScriptServer",
6315 "TailwindServer",
6316 "ESLintServer",
6317 "NoHoverCapabilitiesServer",
6318 ];
6319 let mut language_servers = [
6320 language_registry.register_fake_lsp(
6321 "tsx",
6322 FakeLspAdapter {
6323 name: language_server_names[0],
6324 capabilities: lsp::ServerCapabilities {
6325 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6326 ..lsp::ServerCapabilities::default()
6327 },
6328 ..FakeLspAdapter::default()
6329 },
6330 ),
6331 language_registry.register_fake_lsp(
6332 "tsx",
6333 FakeLspAdapter {
6334 name: language_server_names[1],
6335 capabilities: lsp::ServerCapabilities {
6336 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6337 ..lsp::ServerCapabilities::default()
6338 },
6339 ..FakeLspAdapter::default()
6340 },
6341 ),
6342 language_registry.register_fake_lsp(
6343 "tsx",
6344 FakeLspAdapter {
6345 name: language_server_names[2],
6346 capabilities: lsp::ServerCapabilities {
6347 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6348 ..lsp::ServerCapabilities::default()
6349 },
6350 ..FakeLspAdapter::default()
6351 },
6352 ),
6353 language_registry.register_fake_lsp(
6354 "tsx",
6355 FakeLspAdapter {
6356 name: language_server_names[3],
6357 capabilities: lsp::ServerCapabilities {
6358 hover_provider: None,
6359 ..lsp::ServerCapabilities::default()
6360 },
6361 ..FakeLspAdapter::default()
6362 },
6363 ),
6364 ];
6365
6366 let (buffer, _handle) = project
6367 .update(cx, |p, cx| {
6368 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6369 })
6370 .await
6371 .unwrap();
6372 cx.executor().run_until_parked();
6373
6374 let mut servers_with_hover_requests = HashMap::default();
6375 for i in 0..language_server_names.len() {
6376 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6377 panic!(
6378 "Failed to get language server #{i} with name {}",
6379 &language_server_names[i]
6380 )
6381 });
6382 let new_server_name = new_server.server.name();
6383 assert!(
6384 !servers_with_hover_requests.contains_key(&new_server_name),
6385 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6386 );
6387 match new_server_name.as_ref() {
6388 "TailwindServer" | "TypeScriptServer" => {
6389 servers_with_hover_requests.insert(
6390 new_server_name.clone(),
6391 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6392 move |_, _| {
6393 let name = new_server_name.clone();
6394 async move {
6395 Ok(Some(lsp::Hover {
6396 contents: lsp::HoverContents::Scalar(
6397 lsp::MarkedString::String(format!("{name} hover")),
6398 ),
6399 range: None,
6400 }))
6401 }
6402 },
6403 ),
6404 );
6405 }
6406 "ESLintServer" => {
6407 servers_with_hover_requests.insert(
6408 new_server_name,
6409 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6410 |_, _| async move { Ok(None) },
6411 ),
6412 );
6413 }
6414 "NoHoverCapabilitiesServer" => {
6415 let _never_handled = new_server
6416 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6417 panic!(
6418 "Should not call for hovers server with no corresponding capabilities"
6419 )
6420 });
6421 }
6422 unexpected => panic!("Unexpected server name: {unexpected}"),
6423 }
6424 }
6425
6426 let hover_task = project.update(cx, |project, cx| {
6427 project.hover(&buffer, Point::new(0, 0), cx)
6428 });
6429 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6430 |mut hover_request| async move {
6431 hover_request
6432 .next()
6433 .await
6434 .expect("All hover requests should have been triggered")
6435 },
6436 ))
6437 .await;
6438 assert_eq!(
6439 vec!["TailwindServer hover", "TypeScriptServer hover"],
6440 hover_task
6441 .await
6442 .into_iter()
6443 .flatten()
6444 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6445 .sorted()
6446 .collect::<Vec<_>>(),
6447 "Should receive hover responses from all related servers with hover capabilities"
6448 );
6449}
6450
6451#[gpui::test]
6452async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
6453 init_test(cx);
6454
6455 let fs = FakeFs::new(cx.executor());
6456 fs.insert_tree(
6457 path!("/dir"),
6458 json!({
6459 "a.ts": "a",
6460 }),
6461 )
6462 .await;
6463
6464 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6465
6466 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6467 language_registry.add(typescript_lang());
6468 let mut fake_language_servers = language_registry.register_fake_lsp(
6469 "TypeScript",
6470 FakeLspAdapter {
6471 capabilities: lsp::ServerCapabilities {
6472 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6473 ..lsp::ServerCapabilities::default()
6474 },
6475 ..FakeLspAdapter::default()
6476 },
6477 );
6478
6479 let (buffer, _handle) = project
6480 .update(cx, |p, cx| {
6481 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6482 })
6483 .await
6484 .unwrap();
6485 cx.executor().run_until_parked();
6486
6487 let fake_server = fake_language_servers
6488 .next()
6489 .await
6490 .expect("failed to get the language server");
6491
6492 let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6493 move |_, _| async move {
6494 Ok(Some(lsp::Hover {
6495 contents: lsp::HoverContents::Array(vec![
6496 lsp::MarkedString::String("".to_string()),
6497 lsp::MarkedString::String(" ".to_string()),
6498 lsp::MarkedString::String("\n\n\n".to_string()),
6499 ]),
6500 range: None,
6501 }))
6502 },
6503 );
6504
6505 let hover_task = project.update(cx, |project, cx| {
6506 project.hover(&buffer, Point::new(0, 0), cx)
6507 });
6508 let () = request_handled
6509 .next()
6510 .await
6511 .expect("All hover requests should have been triggered");
6512 assert_eq!(
6513 Vec::<String>::new(),
6514 hover_task
6515 .await
6516 .into_iter()
6517 .flatten()
6518 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6519 .sorted()
6520 .collect::<Vec<_>>(),
6521 "Empty hover parts should be ignored"
6522 );
6523}
6524
6525#[gpui::test]
6526async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
6527 init_test(cx);
6528
6529 let fs = FakeFs::new(cx.executor());
6530 fs.insert_tree(
6531 path!("/dir"),
6532 json!({
6533 "a.ts": "a",
6534 }),
6535 )
6536 .await;
6537
6538 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6539
6540 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6541 language_registry.add(typescript_lang());
6542 let mut fake_language_servers = language_registry.register_fake_lsp(
6543 "TypeScript",
6544 FakeLspAdapter {
6545 capabilities: lsp::ServerCapabilities {
6546 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6547 ..lsp::ServerCapabilities::default()
6548 },
6549 ..FakeLspAdapter::default()
6550 },
6551 );
6552
6553 let (buffer, _handle) = project
6554 .update(cx, |p, cx| {
6555 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
6556 })
6557 .await
6558 .unwrap();
6559 cx.executor().run_until_parked();
6560
6561 let fake_server = fake_language_servers
6562 .next()
6563 .await
6564 .expect("failed to get the language server");
6565
6566 let mut request_handled = fake_server
6567 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
6568 Ok(Some(vec![
6569 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6570 title: "organize imports".to_string(),
6571 kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
6572 ..lsp::CodeAction::default()
6573 }),
6574 lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
6575 title: "fix code".to_string(),
6576 kind: Some(CodeActionKind::SOURCE_FIX_ALL),
6577 ..lsp::CodeAction::default()
6578 }),
6579 ]))
6580 });
6581
6582 let code_actions_task = project.update(cx, |project, cx| {
6583 project.code_actions(
6584 &buffer,
6585 0..buffer.read(cx).len(),
6586 Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
6587 cx,
6588 )
6589 });
6590
6591 let () = request_handled
6592 .next()
6593 .await
6594 .expect("The code action request should have been triggered");
6595
6596 let code_actions = code_actions_task.await.unwrap().unwrap();
6597 assert_eq!(code_actions.len(), 1);
6598 assert_eq!(
6599 code_actions[0].lsp_action.action_kind(),
6600 Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
6601 );
6602}
6603
6604#[gpui::test]
6605async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6606 init_test(cx);
6607
6608 let fs = FakeFs::new(cx.executor());
6609 fs.insert_tree(
6610 path!("/dir"),
6611 json!({
6612 "a.tsx": "a",
6613 }),
6614 )
6615 .await;
6616
6617 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6618
6619 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6620 language_registry.add(tsx_lang());
6621 let language_server_names = [
6622 "TypeScriptServer",
6623 "TailwindServer",
6624 "ESLintServer",
6625 "NoActionsCapabilitiesServer",
6626 ];
6627
6628 let mut language_server_rxs = [
6629 language_registry.register_fake_lsp(
6630 "tsx",
6631 FakeLspAdapter {
6632 name: language_server_names[0],
6633 capabilities: lsp::ServerCapabilities {
6634 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6635 ..lsp::ServerCapabilities::default()
6636 },
6637 ..FakeLspAdapter::default()
6638 },
6639 ),
6640 language_registry.register_fake_lsp(
6641 "tsx",
6642 FakeLspAdapter {
6643 name: language_server_names[1],
6644 capabilities: lsp::ServerCapabilities {
6645 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6646 ..lsp::ServerCapabilities::default()
6647 },
6648 ..FakeLspAdapter::default()
6649 },
6650 ),
6651 language_registry.register_fake_lsp(
6652 "tsx",
6653 FakeLspAdapter {
6654 name: language_server_names[2],
6655 capabilities: lsp::ServerCapabilities {
6656 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6657 ..lsp::ServerCapabilities::default()
6658 },
6659 ..FakeLspAdapter::default()
6660 },
6661 ),
6662 language_registry.register_fake_lsp(
6663 "tsx",
6664 FakeLspAdapter {
6665 name: language_server_names[3],
6666 capabilities: lsp::ServerCapabilities {
6667 code_action_provider: None,
6668 ..lsp::ServerCapabilities::default()
6669 },
6670 ..FakeLspAdapter::default()
6671 },
6672 ),
6673 ];
6674
6675 let (buffer, _handle) = project
6676 .update(cx, |p, cx| {
6677 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6678 })
6679 .await
6680 .unwrap();
6681 cx.executor().run_until_parked();
6682
6683 let mut servers_with_actions_requests = HashMap::default();
6684 for i in 0..language_server_names.len() {
6685 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6686 panic!(
6687 "Failed to get language server #{i} with name {}",
6688 &language_server_names[i]
6689 )
6690 });
6691 let new_server_name = new_server.server.name();
6692
6693 assert!(
6694 !servers_with_actions_requests.contains_key(&new_server_name),
6695 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6696 );
6697 match new_server_name.0.as_ref() {
6698 "TailwindServer" | "TypeScriptServer" => {
6699 servers_with_actions_requests.insert(
6700 new_server_name.clone(),
6701 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6702 move |_, _| {
6703 let name = new_server_name.clone();
6704 async move {
6705 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6706 lsp::CodeAction {
6707 title: format!("{name} code action"),
6708 ..lsp::CodeAction::default()
6709 },
6710 )]))
6711 }
6712 },
6713 ),
6714 );
6715 }
6716 "ESLintServer" => {
6717 servers_with_actions_requests.insert(
6718 new_server_name,
6719 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6720 |_, _| async move { Ok(None) },
6721 ),
6722 );
6723 }
6724 "NoActionsCapabilitiesServer" => {
6725 let _never_handled = new_server
6726 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6727 panic!(
6728 "Should not call for code actions server with no corresponding capabilities"
6729 )
6730 });
6731 }
6732 unexpected => panic!("Unexpected server name: {unexpected}"),
6733 }
6734 }
6735
6736 let code_actions_task = project.update(cx, |project, cx| {
6737 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6738 });
6739
6740 // cx.run_until_parked();
6741 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6742 |mut code_actions_request| async move {
6743 code_actions_request
6744 .next()
6745 .await
6746 .expect("All code actions requests should have been triggered")
6747 },
6748 ))
6749 .await;
6750 assert_eq!(
6751 vec!["TailwindServer code action", "TypeScriptServer code action"],
6752 code_actions_task
6753 .await
6754 .unwrap()
6755 .unwrap()
6756 .into_iter()
6757 .map(|code_action| code_action.lsp_action.title().to_owned())
6758 .sorted()
6759 .collect::<Vec<_>>(),
6760 "Should receive code actions responses from all related servers with hover capabilities"
6761 );
6762}
6763
6764#[gpui::test]
6765async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6766 init_test(cx);
6767
6768 let fs = FakeFs::new(cx.executor());
6769 fs.insert_tree(
6770 "/dir",
6771 json!({
6772 "a.rs": "let a = 1;",
6773 "b.rs": "let b = 2;",
6774 "c.rs": "let c = 2;",
6775 }),
6776 )
6777 .await;
6778
6779 let project = Project::test(
6780 fs,
6781 [
6782 "/dir/a.rs".as_ref(),
6783 "/dir/b.rs".as_ref(),
6784 "/dir/c.rs".as_ref(),
6785 ],
6786 cx,
6787 )
6788 .await;
6789
6790 // check the initial state and get the worktrees
6791 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6792 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6793 assert_eq!(worktrees.len(), 3);
6794
6795 let worktree_a = worktrees[0].read(cx);
6796 let worktree_b = worktrees[1].read(cx);
6797 let worktree_c = worktrees[2].read(cx);
6798
6799 // check they start in the right order
6800 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6801 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6802 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6803
6804 (
6805 worktrees[0].clone(),
6806 worktrees[1].clone(),
6807 worktrees[2].clone(),
6808 )
6809 });
6810
6811 // move first worktree to after the second
6812 // [a, b, c] -> [b, a, c]
6813 project
6814 .update(cx, |project, cx| {
6815 let first = worktree_a.read(cx);
6816 let second = worktree_b.read(cx);
6817 project.move_worktree(first.id(), second.id(), cx)
6818 })
6819 .expect("moving first after second");
6820
6821 // check the state after moving
6822 project.update(cx, |project, cx| {
6823 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6824 assert_eq!(worktrees.len(), 3);
6825
6826 let first = worktrees[0].read(cx);
6827 let second = worktrees[1].read(cx);
6828 let third = worktrees[2].read(cx);
6829
6830 // check they are now in the right order
6831 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6832 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6833 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6834 });
6835
6836 // move the second worktree to before the first
6837 // [b, a, c] -> [a, b, c]
6838 project
6839 .update(cx, |project, cx| {
6840 let second = worktree_a.read(cx);
6841 let first = worktree_b.read(cx);
6842 project.move_worktree(first.id(), second.id(), cx)
6843 })
6844 .expect("moving second before first");
6845
6846 // check the state after moving
6847 project.update(cx, |project, cx| {
6848 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6849 assert_eq!(worktrees.len(), 3);
6850
6851 let first = worktrees[0].read(cx);
6852 let second = worktrees[1].read(cx);
6853 let third = worktrees[2].read(cx);
6854
6855 // check they are now in the right order
6856 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6857 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6858 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6859 });
6860
6861 // move the second worktree to after the third
6862 // [a, b, c] -> [a, c, b]
6863 project
6864 .update(cx, |project, cx| {
6865 let second = worktree_b.read(cx);
6866 let third = worktree_c.read(cx);
6867 project.move_worktree(second.id(), third.id(), cx)
6868 })
6869 .expect("moving second after third");
6870
6871 // check the state after moving
6872 project.update(cx, |project, cx| {
6873 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6874 assert_eq!(worktrees.len(), 3);
6875
6876 let first = worktrees[0].read(cx);
6877 let second = worktrees[1].read(cx);
6878 let third = worktrees[2].read(cx);
6879
6880 // check they are now in the right order
6881 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6882 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6883 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6884 });
6885
6886 // move the third worktree to before the second
6887 // [a, c, b] -> [a, b, c]
6888 project
6889 .update(cx, |project, cx| {
6890 let third = worktree_c.read(cx);
6891 let second = worktree_b.read(cx);
6892 project.move_worktree(third.id(), second.id(), cx)
6893 })
6894 .expect("moving third before second");
6895
6896 // check the state after moving
6897 project.update(cx, |project, cx| {
6898 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6899 assert_eq!(worktrees.len(), 3);
6900
6901 let first = worktrees[0].read(cx);
6902 let second = worktrees[1].read(cx);
6903 let third = worktrees[2].read(cx);
6904
6905 // check they are now in the right order
6906 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6907 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6908 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6909 });
6910
6911 // move the first worktree to after the third
6912 // [a, b, c] -> [b, c, a]
6913 project
6914 .update(cx, |project, cx| {
6915 let first = worktree_a.read(cx);
6916 let third = worktree_c.read(cx);
6917 project.move_worktree(first.id(), third.id(), cx)
6918 })
6919 .expect("moving first after third");
6920
6921 // check the state after moving
6922 project.update(cx, |project, cx| {
6923 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6924 assert_eq!(worktrees.len(), 3);
6925
6926 let first = worktrees[0].read(cx);
6927 let second = worktrees[1].read(cx);
6928 let third = worktrees[2].read(cx);
6929
6930 // check they are now in the right order
6931 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6932 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6933 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
6934 });
6935
6936 // move the third worktree to before the first
6937 // [b, c, a] -> [a, b, c]
6938 project
6939 .update(cx, |project, cx| {
6940 let third = worktree_a.read(cx);
6941 let first = worktree_b.read(cx);
6942 project.move_worktree(third.id(), first.id(), cx)
6943 })
6944 .expect("moving third before first");
6945
6946 // check the state after moving
6947 project.update(cx, |project, cx| {
6948 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6949 assert_eq!(worktrees.len(), 3);
6950
6951 let first = worktrees[0].read(cx);
6952 let second = worktrees[1].read(cx);
6953 let third = worktrees[2].read(cx);
6954
6955 // check they are now in the right order
6956 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6957 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6958 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6959 });
6960}
6961
// Verifies that an unstaged diff (working copy vs. index) is computed for a
// buffer, and that it is recomputed when the index contents change on disk.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index (staged) version of the file.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: one added comment line, one modified println line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Wait for the initial diff calculation, then check hunks against the index text.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Change the index: the comment is now staged, the println line is dropped.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // The unstaged diff should update: only the println line remains unstaged.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7053
// Verifies that an uncommitted diff (working copy vs. HEAD) tracks changes to
// both HEAD and the index, including a file that exists in HEAD but has been
// deleted on disk.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version of the file.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: the println change is already staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: adds a comment on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // Against HEAD: the added comment is unstaged (has a secondary hunk); the
    // println change is fully staged (no secondary hunk).
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file appears as a single, still-unstaged deletion hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk is now staged (no secondary hunk).
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7233
// Exercises staging/unstaging individual hunks through the uncommitted diff:
// optimistic ("pending") hunk states, the event sequence emitted by the diff,
// recovery when the index write fails, and two staging operations in flight
// at once.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Committed text; HEAD and index start out identical to it.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified -> three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to diff events so we can assert on the emitted sequence below.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged
    // (SecondaryHunkRemovalPending) before the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It optimistically shows as pending again.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7573
// Exercises staging hunks while filesystem events are delayed: index writes
// complete, but the FS notifications arrive late (or are interleaved with
// further staging operations). All hunks must end up staged regardless.
// The pinned seeds previously reproduced ordering bugs in this scenario.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Committed text; HEAD and index start out identical to it.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    // Working copy: "zero" deleted, "two" and "four" modified -> three hunks.
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk. It shows as pending because no FS event confirms
    // the index write yet.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7767
// Randomized test: repeatedly stage/unstage random hunks with random delays
// between operations, then verify every hunk settles into the state implied
// by the last operation applied to it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of stage/unstage operations; overridable via the OPERATIONS env var.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every fifth line modified in the buffer -> 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as the model of each hunk's expected secondary status.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        // Toggle: stage if it currently has a secondary hunk, unstage otherwise,
        // and record the expected pending state in the model.
        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times to interleave with background work.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, pending states resolve to their final states.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7890
7891#[gpui::test]
7892async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7893 init_test(cx);
7894
7895 let committed_contents = r#"
7896 fn main() {
7897 println!("hello from HEAD");
7898 }
7899 "#
7900 .unindent();
7901 let file_contents = r#"
7902 fn main() {
7903 println!("hello from the working copy");
7904 }
7905 "#
7906 .unindent();
7907
7908 let fs = FakeFs::new(cx.background_executor.clone());
7909 fs.insert_tree(
7910 "/dir",
7911 json!({
7912 ".git": {},
7913 "src": {
7914 "main.rs": file_contents,
7915 }
7916 }),
7917 )
7918 .await;
7919
7920 fs.set_head_for_repo(
7921 Path::new("/dir/.git"),
7922 &[("src/main.rs", committed_contents.clone())],
7923 "deadbeef",
7924 );
7925 fs.set_index_for_repo(
7926 Path::new("/dir/.git"),
7927 &[("src/main.rs", committed_contents.clone())],
7928 );
7929
7930 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7931
7932 let buffer = project
7933 .update(cx, |project, cx| {
7934 project.open_local_buffer("/dir/src/main.rs", cx)
7935 })
7936 .await
7937 .unwrap();
7938 let uncommitted_diff = project
7939 .update(cx, |project, cx| {
7940 project.open_uncommitted_diff(buffer.clone(), cx)
7941 })
7942 .await
7943 .unwrap();
7944
7945 cx.run_until_parked();
7946 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
7947 let snapshot = buffer.read(cx).snapshot();
7948 assert_hunks(
7949 uncommitted_diff.hunks(&snapshot, cx),
7950 &snapshot,
7951 &uncommitted_diff.base_text_string().unwrap(),
7952 &[(
7953 1..2,
7954 " println!(\"hello from HEAD\");\n",
7955 " println!(\"hello from the working copy\");\n",
7956 DiffHunkStatus {
7957 kind: DiffHunkStatusKind::Modified,
7958 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
7959 },
7960 )],
7961 );
7962 });
7963}
7964
// Verifies mapping of project paths to their containing git repository and
// repo-relative path, including nested repositories, and that the mapping is
// cleared when a repository's .git directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Two nested repos: /root/dir1 and /root/dir1/deps/dep1; /root/c.txt is
    // outside any repository.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for git scanning so repositories are discovered before asserting.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project path, expected (repo work dir, repo-relative path)) pairs;
        // None means the path belongs to no repository.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repo's .git should drop its path mapping.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8054
// Verifies handling of a git repository rooted at the user's home directory:
// a project opened on a subfolder of home must NOT treat home's .git as its
// repository, but a project opened on home itself must.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Project rooted at a subfolder of home.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    // The home-directory repository must not claim files in the subfolder project.
    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Project rooted at the home directory itself.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    // Now the repository is found, with home as its working directory.
    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8112
// End-to-end test of git status tracking against a real on-disk repository:
// cached statuses must be correct on startup and stay correct as files are
// modified, staged, committed, and deleted.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked file that was previously unchanged.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // c.txt now shows up as modified alongside the previous entries.
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Stage and commit everything, removing d.txt from the index as well.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8242
// Checks postprocessing of raw git statuses: nested repositories are excluded
// from the outer repository's status list, and an entry that is deleted in the
// index but present in the working copy is reported as a combined `DA` status.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer `project` repository (not the nested `sub` repo).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8305
// Opens a worktree rooted at a deep subfolder of a repository and verifies
// that the repository is still discovered (with the correct work directory
// above the worktree root) and that statuses for paths inside the subfolder
// are reported and later cleared.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, not the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the repo's status must clear the cached entries too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8385
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// NOTE: `#[cfg(any())]` is never true, so this test is currently compiled out
// entirely; remove that attribute to re-enable it once the flakiness is fixed.
//
// Verifies that merge conflicts arising from a conflicted `git cherry-pick`
// are surfaced via `repository.merge_conflicts`, and cleared again once the
// cherry-pick is resolved and committed.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a conflicting change on a side branch, then cherry-pick it onto
    // a diverged main branch.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The conflicted path should now be reported by the repository.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // After resolution, no conflicts should remain.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8468
// Verifies that editing `.gitignore` at runtime updates both the ignored flag
// on worktree entries and the git status of newly (un)ignored files.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    // The ignored flags flip: a.xml is now ignored, b.txt is staged as added.
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8536
// Verifies that renaming a repository's work directory on disk updates the
// repository's `work_directory_abs_path` while preserving the cached statuses.
//
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory on disk.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename, and the statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8618
// Broad test of per-file git status tracking against a real repository:
// untracked files, modifications, commits, resets, stashes, gitignore edits,
// and directory renames must all be reflected in the cached statuses.
//
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the gitignore to also ignore f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create a fresh untracked file inside a new nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    renamed_dir_name = "new_first_directory/second_directory";

    // Rename the top-level ancestor of the untracked file.
    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The status follows the file to its new path.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8847
8848#[gpui::test]
8849#[cfg_attr(target_os = "windows", ignore)]
8850async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8851 init_test(cx);
8852 cx.executor().allow_parking();
8853
8854 const IGNORE_RULE: &str = "**/target";
8855
8856 let root = TempTree::new(json!({
8857 "project": {
8858 "src": {
8859 "main.rs": "fn main() {}"
8860 },
8861 "target": {
8862 "debug": {
8863 "important_text.txt": "important text",
8864 },
8865 },
8866 ".gitignore": IGNORE_RULE
8867 },
8868
8869 }));
8870 let root_path = root.path();
8871
8872 // Set up git repository before creating the worktree.
8873 let work_dir = root.path().join("project");
8874 let repo = git_init(work_dir.as_path());
8875 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8876 git_add("src/main.rs", &repo);
8877 git_add(".gitignore", &repo);
8878 git_commit("Initial commit", &repo);
8879
8880 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8881 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8882 let project_events = Arc::new(Mutex::new(Vec::new()));
8883 project.update(cx, |project, cx| {
8884 let repo_events = repository_updates.clone();
8885 cx.subscribe(project.git_store(), move |_, _, e, _| {
8886 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8887 repo_events.lock().push(e.clone());
8888 }
8889 })
8890 .detach();
8891 let project_events = project_events.clone();
8892 cx.subscribe_self(move |_, e, _| {
8893 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8894 project_events.lock().extend(
8895 updates
8896 .iter()
8897 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
8898 .filter(|(path, _)| path != "fs-event-sentinel"),
8899 );
8900 }
8901 })
8902 .detach();
8903 });
8904
8905 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8906 tree.flush_fs_events(cx).await;
8907 tree.update(cx, |tree, cx| {
8908 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
8909 })
8910 .await
8911 .unwrap();
8912 tree.update(cx, |tree, _| {
8913 assert_eq!(
8914 tree.entries(true, 0)
8915 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8916 .collect::<Vec<_>>(),
8917 vec![
8918 (rel_path(""), false),
8919 (rel_path("project/"), false),
8920 (rel_path("project/.gitignore"), false),
8921 (rel_path("project/src"), false),
8922 (rel_path("project/src/main.rs"), false),
8923 (rel_path("project/target"), true),
8924 (rel_path("project/target/debug"), true),
8925 (rel_path("project/target/debug/important_text.txt"), true),
8926 ]
8927 );
8928 });
8929
8930 assert_eq!(
8931 repository_updates.lock().drain(..).collect::<Vec<_>>(),
8932 vec![
8933 RepositoryEvent::Updated {
8934 full_scan: true,
8935 new_instance: false,
8936 },
8937 RepositoryEvent::MergeHeadsChanged,
8938 ],
8939 "Initial worktree scan should produce a repo update event"
8940 );
8941 assert_eq!(
8942 project_events.lock().drain(..).collect::<Vec<_>>(),
8943 vec![
8944 ("project/target".to_string(), PathChange::Loaded),
8945 ("project/target/debug".to_string(), PathChange::Loaded),
8946 (
8947 "project/target/debug/important_text.txt".to_string(),
8948 PathChange::Loaded
8949 ),
8950 ],
8951 "Initial project changes should show that all not-ignored and all opened files are loaded"
8952 );
8953
8954 let deps_dir = work_dir.join("target").join("debug").join("deps");
8955 std::fs::create_dir_all(&deps_dir).unwrap();
8956 tree.flush_fs_events(cx).await;
8957 project
8958 .update(cx, |project, cx| project.git_scans_complete(cx))
8959 .await;
8960 cx.executor().run_until_parked();
8961 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
8962 tree.flush_fs_events(cx).await;
8963 project
8964 .update(cx, |project, cx| project.git_scans_complete(cx))
8965 .await;
8966 cx.executor().run_until_parked();
8967 std::fs::remove_dir_all(&deps_dir).unwrap();
8968 tree.flush_fs_events(cx).await;
8969 project
8970 .update(cx, |project, cx| project.git_scans_complete(cx))
8971 .await;
8972 cx.executor().run_until_parked();
8973
8974 tree.update(cx, |tree, _| {
8975 assert_eq!(
8976 tree.entries(true, 0)
8977 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8978 .collect::<Vec<_>>(),
8979 vec![
8980 (rel_path(""), false),
8981 (rel_path("project/"), false),
8982 (rel_path("project/.gitignore"), false),
8983 (rel_path("project/src"), false),
8984 (rel_path("project/src/main.rs"), false),
8985 (rel_path("project/target"), true),
8986 (rel_path("project/target/debug"), true),
8987 (rel_path("project/target/debug/important_text.txt"), true),
8988 ],
8989 "No stray temp files should be left after the flycheck changes"
8990 );
8991 });
8992
8993 assert_eq!(
8994 repository_updates
8995 .lock()
8996 .iter()
8997 .filter(|update| !matches!(update, RepositoryEvent::PathsChanged))
8998 .cloned()
8999 .collect::<Vec<_>>(),
9000 Vec::new(),
9001 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9002 );
9003 assert_eq!(
9004 project_events.lock().as_slice(),
9005 vec![
9006 ("project/target/debug/deps".to_string(), PathChange::Added),
9007 ("project/target/debug/deps".to_string(), PathChange::Removed),
9008 ],
9009 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9010 No updates for more nested directories should happen as those are ignored",
9011 );
9012}
9013
9014#[gpui::test]
9015async fn test_odd_events_for_ignored_dirs(
9016 executor: BackgroundExecutor,
9017 cx: &mut gpui::TestAppContext,
9018) {
9019 init_test(cx);
9020 let fs = FakeFs::new(executor);
9021 fs.insert_tree(
9022 path!("/root"),
9023 json!({
9024 ".git": {},
9025 ".gitignore": "**/target/",
9026 "src": {
9027 "main.rs": "fn main() {}",
9028 },
9029 "target": {
9030 "debug": {
9031 "foo.txt": "foo",
9032 "deps": {}
9033 }
9034 }
9035 }),
9036 )
9037 .await;
9038 fs.set_head_and_index_for_repo(
9039 path!("/root/.git").as_ref(),
9040 &[
9041 (".gitignore", "**/target/".into()),
9042 ("src/main.rs", "fn main() {}".into()),
9043 ],
9044 );
9045
9046 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9047 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9048 let project_events = Arc::new(Mutex::new(Vec::new()));
9049 project.update(cx, |project, cx| {
9050 let repository_updates = repository_updates.clone();
9051 cx.subscribe(project.git_store(), move |_, _, e, _| {
9052 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9053 repository_updates.lock().push(e.clone());
9054 }
9055 })
9056 .detach();
9057 let project_events = project_events.clone();
9058 cx.subscribe_self(move |_, e, _| {
9059 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9060 project_events.lock().extend(
9061 updates
9062 .iter()
9063 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9064 .filter(|(path, _)| path != "fs-event-sentinel"),
9065 );
9066 }
9067 })
9068 .detach();
9069 });
9070
9071 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9072 tree.update(cx, |tree, cx| {
9073 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9074 })
9075 .await
9076 .unwrap();
9077 tree.flush_fs_events(cx).await;
9078 project
9079 .update(cx, |project, cx| project.git_scans_complete(cx))
9080 .await;
9081 cx.run_until_parked();
9082 tree.update(cx, |tree, _| {
9083 assert_eq!(
9084 tree.entries(true, 0)
9085 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9086 .collect::<Vec<_>>(),
9087 vec![
9088 (rel_path(""), false),
9089 (rel_path(".gitignore"), false),
9090 (rel_path("src"), false),
9091 (rel_path("src/main.rs"), false),
9092 (rel_path("target"), true),
9093 (rel_path("target/debug"), true),
9094 (rel_path("target/debug/deps"), true),
9095 (rel_path("target/debug/foo.txt"), true),
9096 ]
9097 );
9098 });
9099
9100 assert_eq!(
9101 repository_updates
9102 .lock()
9103 .drain(..)
9104 .filter(|update| !matches!(update, RepositoryEvent::PathsChanged))
9105 .collect::<Vec<_>>(),
9106 vec![
9107 RepositoryEvent::Updated {
9108 full_scan: true,
9109 new_instance: false,
9110 },
9111 RepositoryEvent::MergeHeadsChanged,
9112 ],
9113 "Initial worktree scan should produce a repo update event"
9114 );
9115 assert_eq!(
9116 project_events.lock().drain(..).collect::<Vec<_>>(),
9117 vec![
9118 ("target".to_string(), PathChange::Loaded),
9119 ("target/debug".to_string(), PathChange::Loaded),
9120 ("target/debug/deps".to_string(), PathChange::Loaded),
9121 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9122 ],
9123 "All non-ignored entries and all opened firs should be getting a project event",
9124 );
9125
9126 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9127 // This may happen multiple times during a single flycheck, but once is enough for testing.
9128 fs.emit_fs_event("/root/target/debug/deps", None);
9129 tree.flush_fs_events(cx).await;
9130 project
9131 .update(cx, |project, cx| project.git_scans_complete(cx))
9132 .await;
9133 cx.executor().run_until_parked();
9134
9135 assert_eq!(
9136 repository_updates
9137 .lock()
9138 .iter()
9139 .filter(|update| !matches!(update, RepositoryEvent::PathsChanged))
9140 .cloned()
9141 .collect::<Vec<_>>(),
9142 Vec::new(),
9143 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9144 );
9145 assert_eq!(
9146 project_events.lock().as_slice(),
9147 Vec::new(),
9148 "No further project events should happen, as only ignored dirs received FS events",
9149 );
9150}
9151
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Nested layout: /root/dir1 is a repo that contains another repo at
    // dir1/dep1. Only dep1 is opened as a visible worktree below.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Wait for the initial git scans so repository state is settled.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repository rooted at the visible worktree should be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add a non-visible (single-file) worktree that lives inside the OUTER repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The invisible worktree must not surface the outer /root/dir1 repository.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9213
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so ignored entries still appear in the
    // worktree and their ignore/git state can be asserted directly.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // `ancestor-ignored-file*` are ignored by a .gitignore ABOVE the repo root;
    // `ignored-dir` is ignored by the repo's own .gitignore.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // Seed HEAD and index so the tracked files start out unmodified.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force-load `ignored-dir`
    // so its entries exist before asserting on them.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: no statuses; only ignored-dir contents are ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create one new file per category: a freshly staged tracked file, a file
    // ignored by the ancestor .gitignore, and a file inside the ignored dir.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After rescan: the staged file reads as Added; the ignored files keep
    // their ignore state and report no git status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git dir itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9354
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // Layout: a primary repo at /project, a linked git worktree at
    // /project/some-worktree (its .git is a "gitdir:" file pointing into
    // .git/worktrees), and a submodule at /project/subdir/some-submodule
    // (a "gitdir:" file pointing into .git/modules).
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three working directories should be detected as distinct repositories.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer should resolve to the linked worktree's repository, not the
    // primary repository at /project.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // On-disk content "B" differs from the seeded HEAD/index "b", so the file
    // must read as modified in the worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9510
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    // One repo at /root/project with two sibling child directories.
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children as separate worktrees; they share the same parent repo.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The shared repository must be reported exactly once, not once per worktree.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9557
9558async fn search(
9559 project: &Entity<Project>,
9560 query: SearchQuery,
9561 cx: &mut gpui::TestAppContext,
9562) -> Result<HashMap<String, Vec<Range<usize>>>> {
9563 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9564 let mut results = HashMap::default();
9565 while let Ok(search_result) = search_rx.recv().await {
9566 match search_result {
9567 SearchResult::Buffer { buffer, ranges } => {
9568 results.entry(buffer).or_insert(ranges);
9569 }
9570 SearchResult::LimitReached => {}
9571 }
9572 }
9573 Ok(results
9574 .into_iter()
9575 .map(|(buffer, ranges)| {
9576 buffer.update(cx, |buffer, cx| {
9577 let path = buffer
9578 .file()
9579 .unwrap()
9580 .full_path(cx)
9581 .to_string_lossy()
9582 .to_string();
9583 let ranges = ranges
9584 .into_iter()
9585 .map(|range| range.to_offset(buffer))
9586 .collect::<Vec<_>>();
9587 (path, ranges)
9588 })
9589 })
9590 .collect())
9591}
9592
/// Shared test setup: installs a test `SettingsStore` global and initializes
/// the release-channel, language, and project-settings subsystems.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings store must be set as a global before the `init` calls
        // below, which register their settings against it.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9604
9605fn json_lang() -> Arc<Language> {
9606 Arc::new(Language::new(
9607 LanguageConfig {
9608 name: "JSON".into(),
9609 matcher: LanguageMatcher {
9610 path_suffixes: vec!["json".to_string()],
9611 ..Default::default()
9612 },
9613 ..Default::default()
9614 },
9615 None,
9616 ))
9617}
9618
9619fn js_lang() -> Arc<Language> {
9620 Arc::new(Language::new(
9621 LanguageConfig {
9622 name: "JavaScript".into(),
9623 matcher: LanguageMatcher {
9624 path_suffixes: vec!["js".to_string()],
9625 ..Default::default()
9626 },
9627 ..Default::default()
9628 },
9629 None,
9630 ))
9631}
9632
9633fn rust_lang() -> Arc<Language> {
9634 Arc::new(Language::new(
9635 LanguageConfig {
9636 name: "Rust".into(),
9637 matcher: LanguageMatcher {
9638 path_suffixes: vec!["rs".to_string()],
9639 ..Default::default()
9640 },
9641 ..Default::default()
9642 },
9643 Some(tree_sitter_rust::LANGUAGE.into()),
9644 ))
9645}
9646
/// A fake "Python" language whose toolchain lister reports a virtual
/// environment for every `.venv` directory found via the provided fake fs.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
        ) -> ToolchainList {
            // Report a toolchain for each `.venv` directory that exists under
            // any ancestor of `subroot_relative_path` (checked on the fake fs).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is not exercised by these tests, so this is a stub.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake toolchain.
        async fn activation_script(&self, _: &Toolchain, _: ShellKind, _: &dyn Fs) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9714
9715fn typescript_lang() -> Arc<Language> {
9716 Arc::new(Language::new(
9717 LanguageConfig {
9718 name: "TypeScript".into(),
9719 matcher: LanguageMatcher {
9720 path_suffixes: vec!["ts".to_string()],
9721 ..Default::default()
9722 },
9723 ..Default::default()
9724 },
9725 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9726 ))
9727}
9728
9729fn tsx_lang() -> Arc<Language> {
9730 Arc::new(Language::new(
9731 LanguageConfig {
9732 name: "tsx".into(),
9733 matcher: LanguageMatcher {
9734 path_suffixes: vec!["tsx".to_string()],
9735 ..Default::default()
9736 },
9737 ..Default::default()
9738 },
9739 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9740 ))
9741}
9742
9743fn get_all_tasks(
9744 project: &Entity<Project>,
9745 task_contexts: Arc<TaskContexts>,
9746 cx: &mut App,
9747) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9748 let new_tasks = project.update(cx, |project, cx| {
9749 project.task_store.update(cx, |task_store, cx| {
9750 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9751 this.used_and_current_resolved_tasks(task_contexts, cx)
9752 })
9753 })
9754 });
9755
9756 cx.background_spawn(async move {
9757 let (mut old, new) = new_tasks.await;
9758 old.extend(new);
9759 old
9760 })
9761}
9762
9763#[track_caller]
9764fn assert_entry_git_state(
9765 tree: &Worktree,
9766 repository: &Repository,
9767 path: &str,
9768 index_status: Option<StatusCode>,
9769 is_ignored: bool,
9770) {
9771 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9772 let entry = tree
9773 .entry_for_path(&rel_path(path))
9774 .unwrap_or_else(|| panic!("entry {path} not found"));
9775 let status = repository
9776 .status_for_path(&repo_path(path))
9777 .map(|entry| entry.status);
9778 let expected = index_status.map(|index_status| {
9779 TrackedStatus {
9780 index_status,
9781 worktree_status: StatusCode::Unmodified,
9782 }
9783 .into()
9784 });
9785 assert_eq!(
9786 status, expected,
9787 "expected {path} to have git status: {expected:?}"
9788 );
9789 assert_eq!(
9790 entry.is_ignored, is_ignored,
9791 "expected {path} to have is_ignored: {is_ignored}"
9792 );
9793}
9794
9795#[track_caller]
9796fn git_init(path: &Path) -> git2::Repository {
9797 let mut init_opts = RepositoryInitOptions::new();
9798 init_opts.initial_head("main");
9799 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9800}
9801
9802#[track_caller]
9803fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9804 let path = path.as_ref();
9805 let mut index = repo.index().expect("Failed to get index");
9806 index.add_path(path).expect("Failed to add file");
9807 index.write().expect("Failed to write index");
9808}
9809
9810#[track_caller]
9811fn git_remove_index(path: &Path, repo: &git2::Repository) {
9812 let mut index = repo.index().expect("Failed to get index");
9813 index.remove_path(path).expect("Failed to add file");
9814 index.write().expect("Failed to write index");
9815}
9816
9817#[track_caller]
9818fn git_commit(msg: &'static str, repo: &git2::Repository) {
9819 use git2::Signature;
9820
9821 let signature = Signature::now("test", "test@zed.dev").unwrap();
9822 let oid = repo.index().unwrap().write_tree().unwrap();
9823 let tree = repo.find_tree(oid).unwrap();
9824 if let Ok(head) = repo.head() {
9825 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9826
9827 let parent_commit = parent_obj.as_commit().unwrap();
9828
9829 repo.commit(
9830 Some("HEAD"),
9831 &signature,
9832 &signature,
9833 msg,
9834 &tree,
9835 &[parent_commit],
9836 )
9837 .expect("Failed to commit with parent");
9838 } else {
9839 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9840 .expect("Failed to commit");
9841 }
9842}
9843
// Compiled out via `#[cfg(any())]`; kept around for ad-hoc use when debugging
// git-related tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9849
9850#[track_caller]
9851fn git_stash(repo: &mut git2::Repository) {
9852 use git2::Signature;
9853
9854 let signature = Signature::now("test", "test@zed.dev").unwrap();
9855 repo.stash_save(&signature, "N/A", None)
9856 .expect("Failed to stash");
9857}
9858
9859#[track_caller]
9860fn git_reset(offset: usize, repo: &git2::Repository) {
9861 let head = repo.head().expect("Couldn't get repo head");
9862 let object = head.peel(git2::ObjectType::Commit).unwrap();
9863 let commit = object.as_commit().unwrap();
9864 let new_head = commit
9865 .parents()
9866 .inspect(|parnet| {
9867 parnet.message();
9868 })
9869 .nth(offset)
9870 .expect("Not enough history");
9871 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9872 .expect("Could not reset");
9873}
9874
// Creates branch `name` pointing at the current HEAD commit.
// (Compiled out via `#[cfg(any())]`; kept for ad-hoc debugging.)
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Panic message previously said "Failed to commit" — a copy-paste from
    // `git_commit`; corrected to describe the branch creation that failed.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9885
// Points HEAD at `name` and checks it out into the working tree.
// (Compiled out via `#[cfg(any())]`; kept for ad-hoc debugging.)
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9892
// Snapshots the repository's status as a path -> git2::Status map.
// (Compiled out via `#[cfg(any())]`; kept for ad-hoc debugging.)
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
9902
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Two sibling worktrees rooted at project1 and project2.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id for the assertions below.
    // NOTE(review): indexes assume `worktrees(cx)` yields them in creation
    // order — confirm if this test ever flakes on worktree order.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Absolute paths inside a worktree resolve to (worktree id, rel path).
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Resolution is path-based: the file need not exist on disk.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}