1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and thus
// we assume that they are not supported out of the box.
100#[cfg(not(windows))]
101#[gpui::test]
102async fn test_symlinks(cx: &mut gpui::TestAppContext) {
103 init_test(cx);
104 cx.executor().allow_parking();
105
106 let dir = TempTree::new(json!({
107 "root": {
108 "apple": "",
109 "banana": {
110 "carrot": {
111 "date": "",
112 "endive": "",
113 }
114 },
115 "fennel": {
116 "grape": "",
117 }
118 }
119 }));
120
121 let root_link_path = dir.path().join("root_link");
122 os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
123 os::unix::fs::symlink(
124 dir.path().join("root/fennel"),
125 dir.path().join("root/finnochio"),
126 )
127 .unwrap();
128
129 let project = Project::test(
130 Arc::new(RealFs::new(None, cx.executor())),
131 [root_link_path.as_ref()],
132 cx,
133 )
134 .await;
135
136 project.update(cx, |project, cx| {
137 let tree = project.worktrees(cx).next().unwrap().read(cx);
138 assert_eq!(tree.file_count(), 5);
139 assert_eq!(
140 tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
141 tree.entry_for_path(rel_path("finnochio/grape"))
142 .unwrap()
143 .inode
144 );
145 });
146}
147
// Verifies EditorConfig support: .editorconfig settings override
// .zed/settings.json, nested .editorconfig files override their parents,
// "tab_width" is used when "indent_size" is absent, "off" values fall back
// to the Zed settings, and globs only affect matching files.
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the temp tree into a FakeFs so the test executor controls I/O.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a worktree-relative path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by the .editorconfig glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
246
// Verifies that `git_hosting_providers` declared in a project's
// .zed/settings.json are registered in the global
// GitHostingProviderRegistry, and are removed again once the setting is
// cleared from the file.
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom GitLab-flavored provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // The provider from the project settings should be registered globally.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Clearing the project settings should unregister the provider.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
311
// Verifies discovery and precedence of project-local settings and tasks:
// nested .zed/settings.json files scope `tab_size` to their directory, tasks
// from nested .zed/tasks.json and the worktree root are both surfaced, and
// after a task is scheduled and a global tasks.json entry is added, the list
// is ordered most-recently-scheduled first, then worktree tasks, then global.
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the single worktree's default context.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the worktree-root .zed directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Nested b/.zed/settings.json applies to files under b/, while the
            // root .zed/settings.json applies elsewhere.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both the nested and the root .zed/tasks.json entries are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root task as scheduled and add an entry to the global
    // tasks.json via the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The just-scheduled task is listed first, then the remaining worktree
    // task, then the newly added global task.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
512
// Verifies that a task referencing $ZED_WORKTREE_ROOT only resolves when the
// task context supplies the worktree-root variable: with an active item but
// no worktree context the task cannot be resolved, and with a worktree
// context carrying the variable the command is fully substituted.
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // An active item alone provides no ZED_WORKTREE_ROOT, so nothing resolves.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context that defines WorktreeRoot, the task resolves
    // and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
604
// Verifies that a single language server adapter can run multiple instances
// within one worktree: two subprojects (rooted by pyproject.toml manifests)
// initially share a server instance, but selecting a different toolchain for
// one subproject causes a second server instance to be started for it.
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: walks up from the queried path looking for a
    // pyproject.toml, which determines the language server's rooting point.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Check up to `depth` ancestors for a pyproject.toml manifest.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python subprojects, each with its own manifest and virtualenv dir.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer in project-a starts the first server instance.
    language_registry.add(python_lang(fs.clone()));
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b initially reuses the same server.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    // Toolchain discovery is rooted at project-b's manifest directory.
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
806
807#[gpui::test]
808async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
809 init_test(cx);
810
811 let fs = FakeFs::new(cx.executor());
812 fs.insert_tree(
813 path!("/dir"),
814 json!({
815 "test.rs": "const A: i32 = 1;",
816 "test2.rs": "",
817 "Cargo.toml": "a = 1",
818 "package.json": "{\"a\": 1}",
819 }),
820 )
821 .await;
822
823 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
824 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
825
826 let mut fake_rust_servers = language_registry.register_fake_lsp(
827 "Rust",
828 FakeLspAdapter {
829 name: "the-rust-language-server",
830 capabilities: lsp::ServerCapabilities {
831 completion_provider: Some(lsp::CompletionOptions {
832 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
833 ..Default::default()
834 }),
835 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
836 lsp::TextDocumentSyncOptions {
837 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
838 ..Default::default()
839 },
840 )),
841 ..Default::default()
842 },
843 ..Default::default()
844 },
845 );
846 let mut fake_json_servers = language_registry.register_fake_lsp(
847 "JSON",
848 FakeLspAdapter {
849 name: "the-json-language-server",
850 capabilities: lsp::ServerCapabilities {
851 completion_provider: Some(lsp::CompletionOptions {
852 trigger_characters: Some(vec![":".to_string()]),
853 ..Default::default()
854 }),
855 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
856 lsp::TextDocumentSyncOptions {
857 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
858 ..Default::default()
859 },
860 )),
861 ..Default::default()
862 },
863 ..Default::default()
864 },
865 );
866
867 // Open a buffer without an associated language server.
868 let (toml_buffer, _handle) = project
869 .update(cx, |project, cx| {
870 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
871 })
872 .await
873 .unwrap();
874
875 // Open a buffer with an associated language server before the language for it has been loaded.
876 let (rust_buffer, _handle2) = project
877 .update(cx, |project, cx| {
878 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
879 })
880 .await
881 .unwrap();
882 rust_buffer.update(cx, |buffer, _| {
883 assert_eq!(buffer.language().map(|l| l.name()), None);
884 });
885
886 // Now we add the languages to the project, and ensure they get assigned to all
887 // the relevant open buffers.
888 language_registry.add(json_lang());
889 language_registry.add(rust_lang());
890 cx.executor().run_until_parked();
891 rust_buffer.update(cx, |buffer, _| {
892 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
893 });
894
895 // A server is started up, and it is notified about Rust files.
896 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
897 assert_eq!(
898 fake_rust_server
899 .receive_notification::<lsp::notification::DidOpenTextDocument>()
900 .await
901 .text_document,
902 lsp::TextDocumentItem {
903 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
904 version: 0,
905 text: "const A: i32 = 1;".to_string(),
906 language_id: "rust".to_string(),
907 }
908 );
909
910 // The buffer is configured based on the language server's capabilities.
911 rust_buffer.update(cx, |buffer, _| {
912 assert_eq!(
913 buffer
914 .completion_triggers()
915 .iter()
916 .cloned()
917 .collect::<Vec<_>>(),
918 &[".".to_string(), "::".to_string()]
919 );
920 });
921 toml_buffer.update(cx, |buffer, _| {
922 assert!(buffer.completion_triggers().is_empty());
923 });
924
925 // Edit a buffer. The changes are reported to the language server.
926 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
927 assert_eq!(
928 fake_rust_server
929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
930 .await
931 .text_document,
932 lsp::VersionedTextDocumentIdentifier::new(
933 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
934 1
935 )
936 );
937
938 // Open a third buffer with a different associated language server.
939 let (json_buffer, _json_handle) = project
940 .update(cx, |project, cx| {
941 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
942 })
943 .await
944 .unwrap();
945
946 // A json language server is started up and is only notified about the json buffer.
947 let mut fake_json_server = fake_json_servers.next().await.unwrap();
948 assert_eq!(
949 fake_json_server
950 .receive_notification::<lsp::notification::DidOpenTextDocument>()
951 .await
952 .text_document,
953 lsp::TextDocumentItem {
954 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
955 version: 0,
956 text: "{\"a\": 1}".to_string(),
957 language_id: "json".to_string(),
958 }
959 );
960
961 // This buffer is configured based on the second language server's
962 // capabilities.
963 json_buffer.update(cx, |buffer, _| {
964 assert_eq!(
965 buffer
966 .completion_triggers()
967 .iter()
968 .cloned()
969 .collect::<Vec<_>>(),
970 &[":".to_string()]
971 );
972 });
973
974 // When opening another buffer whose language server is already running,
975 // it is also configured based on the existing language server's capabilities.
976 let (rust_buffer2, _handle4) = project
977 .update(cx, |project, cx| {
978 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
979 })
980 .await
981 .unwrap();
982 rust_buffer2.update(cx, |buffer, _| {
983 assert_eq!(
984 buffer
985 .completion_triggers()
986 .iter()
987 .cloned()
988 .collect::<Vec<_>>(),
989 &[".".to_string(), "::".to_string()]
990 );
991 });
992
993 // Changes are reported only to servers matching the buffer's language.
994 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
995 rust_buffer2.update(cx, |buffer, cx| {
996 buffer.edit([(0..0, "let x = 1;")], None, cx)
997 });
998 assert_eq!(
999 fake_rust_server
1000 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1001 .await
1002 .text_document,
1003 lsp::VersionedTextDocumentIdentifier::new(
1004 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1005 1
1006 )
1007 );
1008
1009 // Save notifications are reported to all servers.
1010 project
1011 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1012 .await
1013 .unwrap();
1014 assert_eq!(
1015 fake_rust_server
1016 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1017 .await
1018 .text_document,
1019 lsp::TextDocumentIdentifier::new(
1020 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1021 )
1022 );
1023 assert_eq!(
1024 fake_json_server
1025 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1026 .await
1027 .text_document,
1028 lsp::TextDocumentIdentifier::new(
1029 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1030 )
1031 );
1032
1033 // Renames are reported only to servers matching the buffer's language.
1034 fs.rename(
1035 Path::new(path!("/dir/test2.rs")),
1036 Path::new(path!("/dir/test3.rs")),
1037 Default::default(),
1038 )
1039 .await
1040 .unwrap();
1041 assert_eq!(
1042 fake_rust_server
1043 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1044 .await
1045 .text_document,
1046 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1047 );
1048 assert_eq!(
1049 fake_rust_server
1050 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1051 .await
1052 .text_document,
1053 lsp::TextDocumentItem {
1054 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1055 version: 0,
1056 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1057 language_id: "rust".to_string(),
1058 },
1059 );
1060
1061 rust_buffer2.update(cx, |buffer, cx| {
1062 buffer.update_diagnostics(
1063 LanguageServerId(0),
1064 DiagnosticSet::from_sorted_entries(
1065 vec![DiagnosticEntry {
1066 diagnostic: Default::default(),
1067 range: Anchor::MIN..Anchor::MAX,
1068 }],
1069 &buffer.snapshot(),
1070 ),
1071 cx,
1072 );
1073 assert_eq!(
1074 buffer
1075 .snapshot()
1076 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1077 .count(),
1078 1
1079 );
1080 });
1081
1082 // When the rename changes the extension of the file, the buffer gets closed on the old
1083 // language server and gets opened on the new one.
1084 fs.rename(
1085 Path::new(path!("/dir/test3.rs")),
1086 Path::new(path!("/dir/test3.json")),
1087 Default::default(),
1088 )
1089 .await
1090 .unwrap();
1091 assert_eq!(
1092 fake_rust_server
1093 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1094 .await
1095 .text_document,
1096 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1097 );
1098 assert_eq!(
1099 fake_json_server
1100 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1101 .await
1102 .text_document,
1103 lsp::TextDocumentItem {
1104 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1105 version: 0,
1106 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1107 language_id: "json".to_string(),
1108 },
1109 );
1110
1111 // We clear the diagnostics, since the language has changed.
1112 rust_buffer2.update(cx, |buffer, _| {
1113 assert_eq!(
1114 buffer
1115 .snapshot()
1116 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1117 .count(),
1118 0
1119 );
1120 });
1121
1122 // The renamed file's version resets after changing language server.
1123 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1124 assert_eq!(
1125 fake_json_server
1126 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1127 .await
1128 .text_document,
1129 lsp::VersionedTextDocumentIdentifier::new(
1130 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1131 1
1132 )
1133 );
1134
1135 // Restart language servers
1136 project.update(cx, |project, cx| {
1137 project.restart_language_servers_for_buffers(
1138 vec![rust_buffer.clone(), json_buffer.clone()],
1139 HashSet::default(),
1140 cx,
1141 );
1142 });
1143
1144 let mut rust_shutdown_requests = fake_rust_server
1145 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1146 let mut json_shutdown_requests = fake_json_server
1147 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1148 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1149
1150 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1151 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1152
1153 // Ensure rust document is reopened in new rust language server
1154 assert_eq!(
1155 fake_rust_server
1156 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1157 .await
1158 .text_document,
1159 lsp::TextDocumentItem {
1160 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1161 version: 0,
1162 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1163 language_id: "rust".to_string(),
1164 }
1165 );
1166
1167 // Ensure json documents are reopened in new json language server
1168 assert_set_eq!(
1169 [
1170 fake_json_server
1171 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1172 .await
1173 .text_document,
1174 fake_json_server
1175 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1176 .await
1177 .text_document,
1178 ],
1179 [
1180 lsp::TextDocumentItem {
1181 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1182 version: 0,
1183 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1184 language_id: "json".to_string(),
1185 },
1186 lsp::TextDocumentItem {
1187 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1188 version: 0,
1189 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1190 language_id: "json".to_string(),
1191 }
1192 ]
1193 );
1194
1195 // Close notifications are reported only to servers matching the buffer's language.
1196 cx.update(|_| drop(_json_handle));
1197 let close_message = lsp::DidCloseTextDocumentParams {
1198 text_document: lsp::TextDocumentIdentifier::new(
1199 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1200 ),
1201 };
1202 assert_eq!(
1203 fake_json_server
1204 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1205 .await,
1206 close_message,
1207 );
1208}
1209
// Verifies that file-system events are forwarded to a language server
// according to the glob patterns it registers via
// `workspace/didChangeWatchedFiles`, and that asking to watch a gitignored
// directory causes that directory to be loaded into the worktree.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    // The main worktree; `.gitignore` marks `target` as ignored.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    // Directories outside the worktree, reachable only via LSP-driven
    // navigation (e.g. go-to-definition into a dependency).
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                // `target` is known but its contents are not scanned yet.
                ("target", true),
            ]
        );
    });

    // Baseline so we can assert how many extra directory scans the
    // watcher registration below causes.
    let prev_read_dir_count = fs.read_dir_call_count();

    let fake_server = fake_servers.next().await.unwrap();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    // Register five watchers: an exact in-worktree path, an in-worktree glob,
    // a glob inside the ignored `target` dir, an absolute out-of-worktree
    // glob, and a relative (`**/`) glob.
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the/stdlib/src/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("**/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort by URI so the assertions below are order-independent.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // No FS mutations have happened yet, so no events should be delivered.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // Setting up the watchers caused exactly four extra directory scans.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Excluding the always-present config/gitignore watchers, we now watch the
    // worktree root, the single out-of-worktree file, and the stdlib `src` dir.
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| {
        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
    });
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
                // Only `target/y` (the watched subtree) is loaded recursively.
                ("target/x", true),
                ("target/y", true),
                ("target/y/out", true),
                ("target/y/out/y.rs", true),
                ("target/z", true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        // NOTE(review): the fake tree was inserted at "/the/stdlib", so
        // "/the-stdlib/LICENSE" does not exist in it. It works as a
        // non-matching mutation either way, but confirm this is not a typo
        // for path!("/the/stdlib/LICENSE").
        path!("/the-stdlib/LICENSE").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
1512
1513#[gpui::test]
1514async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1515 init_test(cx);
1516
1517 let fs = FakeFs::new(cx.executor());
1518 fs.insert_tree(
1519 path!("/dir"),
1520 json!({
1521 "a.rs": "let a = 1;",
1522 "b.rs": "let b = 2;"
1523 }),
1524 )
1525 .await;
1526
1527 let project = Project::test(
1528 fs,
1529 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1530 cx,
1531 )
1532 .await;
1533 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1534
1535 let buffer_a = project
1536 .update(cx, |project, cx| {
1537 project.open_local_buffer(path!("/dir/a.rs"), cx)
1538 })
1539 .await
1540 .unwrap();
1541 let buffer_b = project
1542 .update(cx, |project, cx| {
1543 project.open_local_buffer(path!("/dir/b.rs"), cx)
1544 })
1545 .await
1546 .unwrap();
1547
1548 lsp_store.update(cx, |lsp_store, cx| {
1549 lsp_store
1550 .update_diagnostics(
1551 LanguageServerId(0),
1552 lsp::PublishDiagnosticsParams {
1553 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1554 version: None,
1555 diagnostics: vec![lsp::Diagnostic {
1556 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1557 severity: Some(lsp::DiagnosticSeverity::ERROR),
1558 message: "error 1".to_string(),
1559 ..Default::default()
1560 }],
1561 },
1562 None,
1563 DiagnosticSourceKind::Pushed,
1564 &[],
1565 cx,
1566 )
1567 .unwrap();
1568 lsp_store
1569 .update_diagnostics(
1570 LanguageServerId(0),
1571 lsp::PublishDiagnosticsParams {
1572 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1573 version: None,
1574 diagnostics: vec![lsp::Diagnostic {
1575 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1576 severity: Some(DiagnosticSeverity::WARNING),
1577 message: "error 2".to_string(),
1578 ..Default::default()
1579 }],
1580 },
1581 None,
1582 DiagnosticSourceKind::Pushed,
1583 &[],
1584 cx,
1585 )
1586 .unwrap();
1587 });
1588
1589 buffer_a.update(cx, |buffer, _| {
1590 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1591 assert_eq!(
1592 chunks
1593 .iter()
1594 .map(|(s, d)| (s.as_str(), *d))
1595 .collect::<Vec<_>>(),
1596 &[
1597 ("let ", None),
1598 ("a", Some(DiagnosticSeverity::ERROR)),
1599 (" = 1;", None),
1600 ]
1601 );
1602 });
1603 buffer_b.update(cx, |buffer, _| {
1604 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1605 assert_eq!(
1606 chunks
1607 .iter()
1608 .map(|(s, d)| (s.as_str(), *d))
1609 .collect::<Vec<_>>(),
1610 &[
1611 ("let ", None),
1612 ("b", Some(DiagnosticSeverity::WARNING)),
1613 (" = 2;", None),
1614 ]
1615 );
1616 });
1617}
1618
1619#[gpui::test]
1620async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1621 init_test(cx);
1622
1623 let fs = FakeFs::new(cx.executor());
1624 fs.insert_tree(
1625 path!("/root"),
1626 json!({
1627 "dir": {
1628 ".git": {
1629 "HEAD": "ref: refs/heads/main",
1630 },
1631 ".gitignore": "b.rs",
1632 "a.rs": "let a = 1;",
1633 "b.rs": "let b = 2;",
1634 },
1635 "other.rs": "let b = c;"
1636 }),
1637 )
1638 .await;
1639
1640 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1641 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1642 let (worktree, _) = project
1643 .update(cx, |project, cx| {
1644 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1645 })
1646 .await
1647 .unwrap();
1648 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1649
1650 let (worktree, _) = project
1651 .update(cx, |project, cx| {
1652 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1653 })
1654 .await
1655 .unwrap();
1656 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1657
1658 let server_id = LanguageServerId(0);
1659 lsp_store.update(cx, |lsp_store, cx| {
1660 lsp_store
1661 .update_diagnostics(
1662 server_id,
1663 lsp::PublishDiagnosticsParams {
1664 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1665 version: None,
1666 diagnostics: vec![lsp::Diagnostic {
1667 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1668 severity: Some(lsp::DiagnosticSeverity::ERROR),
1669 message: "unused variable 'b'".to_string(),
1670 ..Default::default()
1671 }],
1672 },
1673 None,
1674 DiagnosticSourceKind::Pushed,
1675 &[],
1676 cx,
1677 )
1678 .unwrap();
1679 lsp_store
1680 .update_diagnostics(
1681 server_id,
1682 lsp::PublishDiagnosticsParams {
1683 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1684 version: None,
1685 diagnostics: vec![lsp::Diagnostic {
1686 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1687 severity: Some(lsp::DiagnosticSeverity::ERROR),
1688 message: "unknown variable 'c'".to_string(),
1689 ..Default::default()
1690 }],
1691 },
1692 None,
1693 DiagnosticSourceKind::Pushed,
1694 &[],
1695 cx,
1696 )
1697 .unwrap();
1698 });
1699
1700 let main_ignored_buffer = project
1701 .update(cx, |project, cx| {
1702 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1703 })
1704 .await
1705 .unwrap();
1706 main_ignored_buffer.update(cx, |buffer, _| {
1707 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1708 assert_eq!(
1709 chunks
1710 .iter()
1711 .map(|(s, d)| (s.as_str(), *d))
1712 .collect::<Vec<_>>(),
1713 &[
1714 ("let ", None),
1715 ("b", Some(DiagnosticSeverity::ERROR)),
1716 (" = 2;", None),
1717 ],
1718 "Gigitnored buffers should still get in-buffer diagnostics",
1719 );
1720 });
1721 let other_buffer = project
1722 .update(cx, |project, cx| {
1723 project.open_buffer((other_worktree_id, rel_path("")), cx)
1724 })
1725 .await
1726 .unwrap();
1727 other_buffer.update(cx, |buffer, _| {
1728 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1729 assert_eq!(
1730 chunks
1731 .iter()
1732 .map(|(s, d)| (s.as_str(), *d))
1733 .collect::<Vec<_>>(),
1734 &[
1735 ("let b = ", None),
1736 ("c", Some(DiagnosticSeverity::ERROR)),
1737 (";", None),
1738 ],
1739 "Buffers from hidden projects should still get in-buffer diagnostics"
1740 );
1741 });
1742
1743 project.update(cx, |project, cx| {
1744 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1745 assert_eq!(
1746 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1747 vec![(
1748 ProjectPath {
1749 worktree_id: main_worktree_id,
1750 path: rel_path("b.rs").into(),
1751 },
1752 server_id,
1753 DiagnosticSummary {
1754 error_count: 1,
1755 warning_count: 0,
1756 }
1757 )]
1758 );
1759 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1760 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1761 });
1762}
1763
// Verifies the project event stream around disk-based diagnostics: server
// added, started/finished progress bracketing, diagnostic updates, and that
// publishing empty diagnostics twice emits only one update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Progress on this token is treated as disk-based diagnostics.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress on the disk-based token surfaces as
    // DiskBasedDiagnosticsStarted.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publish a diagnostic for a file that has no open buffer yet.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    // Opening the buffer afterwards should expose the stored diagnostic.
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Second empty publish: no further event should be emitted.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1899
// Restarting a language server while its disk-based diagnostics are still in
// progress must not leave the project stuck in a "diagnosing" state: the new
// server's progress supersedes the old one's unfinished task.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed, then the new server (id 1) is added.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    fake_server.start_progress(progress_token).await;
    // The open buffer is re-registered with the new server instance.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
1999
2000#[gpui::test]
2001async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2002 init_test(cx);
2003
2004 let fs = FakeFs::new(cx.executor());
2005 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2006
2007 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2008
2009 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2010 language_registry.add(rust_lang());
2011 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2012
2013 let (buffer, _) = project
2014 .update(cx, |project, cx| {
2015 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2016 })
2017 .await
2018 .unwrap();
2019
2020 // Publish diagnostics
2021 let fake_server = fake_servers.next().await.unwrap();
2022 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2023 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2024 version: None,
2025 diagnostics: vec![lsp::Diagnostic {
2026 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2027 severity: Some(lsp::DiagnosticSeverity::ERROR),
2028 message: "the message".to_string(),
2029 ..Default::default()
2030 }],
2031 });
2032
2033 cx.executor().run_until_parked();
2034 buffer.update(cx, |buffer, _| {
2035 assert_eq!(
2036 buffer
2037 .snapshot()
2038 .diagnostics_in_range::<_, usize>(0..1, false)
2039 .map(|entry| entry.diagnostic.message.clone())
2040 .collect::<Vec<_>>(),
2041 ["the message".to_string()]
2042 );
2043 });
2044 project.update(cx, |project, cx| {
2045 assert_eq!(
2046 project.diagnostic_summary(false, cx),
2047 DiagnosticSummary {
2048 error_count: 1,
2049 warning_count: 0,
2050 }
2051 );
2052 });
2053
2054 project.update(cx, |project, cx| {
2055 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2056 });
2057
2058 // The diagnostics are cleared.
2059 cx.executor().run_until_parked();
2060 buffer.update(cx, |buffer, _| {
2061 assert_eq!(
2062 buffer
2063 .snapshot()
2064 .diagnostics_in_range::<_, usize>(0..1, false)
2065 .map(|entry| entry.diagnostic.message.clone())
2066 .collect::<Vec<_>>(),
2067 Vec::<String>::new(),
2068 );
2069 });
2070 project.update(cx, |project, cx| {
2071 assert_eq!(
2072 project.diagnostic_summary(false, cx),
2073 DiagnosticSummary {
2074 error_count: 0,
2075 warning_count: 0,
2076 }
2077 );
2078 });
2079}
2080
2081#[gpui::test]
2082async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2083 init_test(cx);
2084
2085 let fs = FakeFs::new(cx.executor());
2086 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2087
2088 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2089 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2090
2091 language_registry.add(rust_lang());
2092 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2093
2094 let (buffer, _handle) = project
2095 .update(cx, |project, cx| {
2096 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2097 })
2098 .await
2099 .unwrap();
2100
2101 // Before restarting the server, report diagnostics with an unknown buffer version.
2102 let fake_server = fake_servers.next().await.unwrap();
2103 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2104 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2105 version: Some(10000),
2106 diagnostics: Vec::new(),
2107 });
2108 cx.executor().run_until_parked();
2109 project.update(cx, |project, cx| {
2110 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2111 });
2112
2113 let mut fake_server = fake_servers.next().await.unwrap();
2114 let notification = fake_server
2115 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2116 .await
2117 .text_document;
2118 assert_eq!(notification.version, 0);
2119}
2120
// Cancelling language-server work for a buffer should send a
// `window/workDoneProgress/cancel` only for progress that was begun as
// cancellable.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    let mut fake_server = fake_servers.next().await.unwrap();
    // Begin one non-cancellable progress task...
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    // ...and one cancellable progress task.
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // The cancel notification should target the cancellable token (the
    // non-cancellable "another-token" task is expected to be left alone).
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2185
// Toggling the per-language `enable_language_server` setting should stop and
// start only the affected language's server, leaving other servers running.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The fresh Rust server instance re-opens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server exits.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2303
// Verifies that diagnostics published against an *older* document version are
// translated through the edits made since that version: ranges published for
// the original text must land on the moved/renamed text, overlapping
// diagnostics must be ordered and highlighted correctly, and out-of-order
// (non-monotonic group id) publishes must still resolve.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" marks these diagnostics as disk-based, which the assertions
    // below check via `is_disk_based`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    // (i.e. a now-stale version: ranges use the pre-edit line numbers).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    // (Two newline insertions at the top shift every range down by 2 rows.)
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // The wider WARNING comes before the narrower ERROR it contains,
        // and within the overlap the ERROR wins the chunk highlight.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    // (the 'BB' entry is listed before 'A' even though it appears later in
    // the file; both must still resolve against the current version).
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2595
// Verifies how zero-width diagnostic ranges are rendered: an empty range is
// widened to cover the following character, except at end-of-line where it is
// widened backward to cover the preceding character instead.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =\n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Push two empty-range diagnostics directly into the LSP store:
    // (0, 10) sits before the ';' (mid-line); (1, 10) is past the end of
    // "let two =" (end-of-line).
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                ("\nlet three = 3;\n", None)
            ]
        );
    });
}
2671
// Verifies that diagnostics published by distinct language servers for the
// same path are kept separately and both counted in the summary (two servers,
// one error each => error_count == 2).
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same file, same range, but two different server ids — neither
        // publish may clobber the other.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2732
// Verifies that `edits_from_lsp` interprets LSP edits against the *document
// version the server last saw* and rebases them onto the current buffer:
// edits made locally after the server's snapshot must survive, and the
// server's edits must land in the right (shifted) places.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the version the server received at didOpen; the edits below
    // will be expressed relative to this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    // These positions are valid for the *old* version of the document;
    // edits_from_lsp must translate them through the local edits above.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        // The local comments inserted above are preserved alongside the
        // server's rebased edits.
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2887
// Verifies that `edits_from_lsp` collapses a "huge diff for a tiny change"
// (rewrite-the-whole-file style edits, as rust-analyzer emits for
// merge-imports) down to minimal edits touching only the lines that changed.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only two minimal edits remain: the use-statement rewrite and the
        // deletion of the now-redundant second use line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
2998
// Verifies that `edits_from_lsp` tolerates an insertion that follows a
// replacement at the same position (technically a violation of the LSP spec's
// edit-ordering rules) and still applies both edits sensibly.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // The insertion ends up before the (no-op) replacement target.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3054
// Verifies that `edits_from_lsp` repairs malformed server edits: unordered
// edits, an inverted range (end before start), and a range that extends past
// the end of the document must all be normalized, yielding the same minimal
// result as the well-formed equivalent.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: (0, 8) .. (0, 4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position (99, 0) is beyond the end of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal result as the well-formed case: a rewrite of the
        // first use statement plus a one-line deletion.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3161
3162fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3163 buffer: &Buffer,
3164 range: Range<T>,
3165) -> Vec<(String, Option<DiagnosticSeverity>)> {
3166 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3167 for chunk in buffer.snapshot().chunks(range, true) {
3168 if chunks
3169 .last()
3170 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3171 {
3172 chunks.last_mut().unwrap().0.push_str(chunk.text);
3173 } else {
3174 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3175 }
3176 }
3177 chunks
3178}
3179
// Verifies go-to-definition into a file outside the project's worktree: the
// target file must be opened via an invisible worktree that is handled by the
// *same* server instance (no new server spawns) and is dropped once the last
// reference to the definition goes away.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is in the project; a.rs lives outside the worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs appears as an invisible (false) worktree while the
        // definition is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: returns each worktree's absolute path paired with its
    // visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3278
// Verifies that when a completion item carries an explicit `text_edit`, that
// edit's range and new text win over both `insert_text` and `label` when
// resolving the completion's replacement.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Serve one item whose text_edit replaces the trailing "fqn" (the last
    // 3 characters) with "textEditText".
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3361
// Regression coverage for LSP 3.17 `CompletionList.itemDefaults.editRange`:
// when individual completion items carry no `textEdit`, the range that the
// completion replaces must come from the list-level default edit range.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Issue the completion request first; the handler below answers it.
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn" (last 3 chars).
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` supplies the inserted text; the list-level
        // default range supplies the replaced span.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        // Present but expected to be ignored here — the
                        // assertion below requires the label to be used.
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no edit text of its own, the item falls back to its label,
        // still replacing the default edit range.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3498
// Verifies completion replace ranges when the server supplies neither a
// per-item `textEdit` nor a list-level default edit range: the replaced
// span must be inferred from the word fragment around the cursor.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // `insert_text` wins over the label, and the replaced span is the word
    // "fqn" (3 chars) immediately before the cursor.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // The completion position sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    // The bare label is inserted, replacing the word fragment "cmp" that
    // precedes the cursor position.
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3604
3605#[gpui::test]
3606async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
3607 init_test(cx);
3608
3609 let fs = FakeFs::new(cx.executor());
3610 fs.insert_tree(
3611 path!("/dir"),
3612 json!({
3613 "a.ts": "",
3614 }),
3615 )
3616 .await;
3617
3618 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
3619
3620 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
3621 language_registry.add(typescript_lang());
3622 let mut fake_language_servers = language_registry.register_fake_lsp(
3623 "TypeScript",
3624 FakeLspAdapter {
3625 capabilities: lsp::ServerCapabilities {
3626 completion_provider: Some(lsp::CompletionOptions {
3627 trigger_characters: Some(vec![":".to_string()]),
3628 ..Default::default()
3629 }),
3630 ..Default::default()
3631 },
3632 ..Default::default()
3633 },
3634 );
3635
3636 let (buffer, _handle) = project
3637 .update(cx, |p, cx| {
3638 p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
3639 })
3640 .await
3641 .unwrap();
3642
3643 let fake_server = fake_language_servers.next().await.unwrap();
3644
3645 let text = "let a = b.fqn";
3646 buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
3647 let completions = project.update(cx, |project, cx| {
3648 project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
3649 });
3650
3651 fake_server
3652 .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
3653 Ok(Some(lsp::CompletionResponse::Array(vec![
3654 lsp::CompletionItem {
3655 label: "fullyQualifiedName?".into(),
3656 insert_text: Some("fully\rQualified\r\nName".into()),
3657 ..Default::default()
3658 },
3659 ])))
3660 })
3661 .next()
3662 .await;
3663 let completions = completions
3664 .await
3665 .unwrap()
3666 .into_iter()
3667 .flat_map(|response| response.completions)
3668 .collect::<Vec<_>>();
3669 assert_eq!(completions.len(), 1);
3670 assert_eq!(completions[0].new_text, "fully\nQualified\nName");
3671}
3672
// A code action may carry no edits and instead resolve to an LSP command.
// This drives the full round trip: request actions, resolve the chosen
// action into a command, execute that command on the server, and receive
// the resulting edits back via a `workspace/applyEdit` server request.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    // The `data` payload is what the resolve handler below
                    // checks for before attaching a command.
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action ("The code action"), which resolves to a command.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server -> client request: prepend "X" to the buffer.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3814
3815#[gpui::test]
3816async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
3817 init_test(cx);
3818 let fs = FakeFs::new(cx.background_executor.clone());
3819 let expected_contents = "content";
3820 fs.as_fake()
3821 .insert_tree(
3822 "/root",
3823 json!({
3824 "test.txt": expected_contents
3825 }),
3826 )
3827 .await;
3828
3829 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
3830
3831 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
3832 let worktree = project.worktrees(cx).next().unwrap();
3833 let entry_id = worktree
3834 .read(cx)
3835 .entry_for_path(rel_path("test.txt"))
3836 .unwrap()
3837 .id;
3838 (worktree, entry_id)
3839 });
3840 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
3841 let _result = project
3842 .update(cx, |project, cx| {
3843 project.rename_entry(
3844 entry_id,
3845 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
3846 cx,
3847 )
3848 })
3849 .await
3850 .unwrap();
3851 worktree.read_with(cx, |worktree, _| {
3852 assert!(
3853 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3854 "Old file should have been removed"
3855 );
3856 assert!(
3857 worktree
3858 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3859 .is_some(),
3860 "Whole directory hierarchy and the new file should have been created"
3861 );
3862 });
3863 assert_eq!(
3864 worktree
3865 .update(cx, |worktree, cx| {
3866 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
3867 })
3868 .await
3869 .unwrap()
3870 .text,
3871 expected_contents,
3872 "Moved file's contents should be preserved"
3873 );
3874
3875 let entry_id = worktree.read_with(cx, |worktree, _| {
3876 worktree
3877 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3878 .unwrap()
3879 .id
3880 });
3881
3882 let _result = project
3883 .update(cx, |project, cx| {
3884 project.rename_entry(
3885 entry_id,
3886 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
3887 cx,
3888 )
3889 })
3890 .await
3891 .unwrap();
3892 worktree.read_with(cx, |worktree, _| {
3893 assert!(
3894 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3895 "First file should not reappear"
3896 );
3897 assert!(
3898 worktree
3899 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3900 .is_none(),
3901 "Old file should have been removed"
3902 );
3903 assert!(
3904 worktree
3905 .entry_for_path(rel_path("dir1/dir2/test.txt"))
3906 .is_some(),
3907 "No error should have occurred after moving into existing directory"
3908 );
3909 });
3910 assert_eq!(
3911 worktree
3912 .update(cx, |worktree, cx| {
3913 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
3914 })
3915 .await
3916 .unwrap()
3917 .text,
3918 expected_contents,
3919 "Moved file's contents should be preserved"
3920 );
3921}
3922
3923#[gpui::test(iterations = 10)]
3924async fn test_save_file(cx: &mut gpui::TestAppContext) {
3925 init_test(cx);
3926
3927 let fs = FakeFs::new(cx.executor());
3928 fs.insert_tree(
3929 path!("/dir"),
3930 json!({
3931 "file1": "the old contents",
3932 }),
3933 )
3934 .await;
3935
3936 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3937 let buffer = project
3938 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3939 .await
3940 .unwrap();
3941 buffer.update(cx, |buffer, cx| {
3942 assert_eq!(buffer.text(), "the old contents");
3943 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3944 });
3945
3946 project
3947 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3948 .await
3949 .unwrap();
3950
3951 let new_text = fs
3952 .load(Path::new(path!("/dir/file1")))
3953 .await
3954 .unwrap()
3955 .replace("\r\n", "\n");
3956 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3957}
3958
// Regression test: saving an untitled buffer under a path with a
// recognized extension must start the matching language server and
// register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // An untitled buffer starts with no language servers attached.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer under a `.rs` path inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // After the save, the buffer reports an attached language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4038
4039#[gpui::test(iterations = 30)]
4040async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4041 init_test(cx);
4042
4043 let fs = FakeFs::new(cx.executor());
4044 fs.insert_tree(
4045 path!("/dir"),
4046 json!({
4047 "file1": "the original contents",
4048 }),
4049 )
4050 .await;
4051
4052 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4053 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4054 let buffer = project
4055 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4056 .await
4057 .unwrap();
4058
4059 // Simulate buffer diffs being slow, so that they don't complete before
4060 // the next file change occurs.
4061 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4062
4063 // Change the buffer's file on disk, and then wait for the file change
4064 // to be detected by the worktree, so that the buffer starts reloading.
4065 fs.save(
4066 path!("/dir/file1").as_ref(),
4067 &"the first contents".into(),
4068 Default::default(),
4069 )
4070 .await
4071 .unwrap();
4072 worktree.next_event(cx).await;
4073
4074 // Change the buffer's file again. Depending on the random seed, the
4075 // previous file change may still be in progress.
4076 fs.save(
4077 path!("/dir/file1").as_ref(),
4078 &"the second contents".into(),
4079 Default::default(),
4080 )
4081 .await
4082 .unwrap();
4083 worktree.next_event(cx).await;
4084
4085 cx.executor().run_until_parked();
4086 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4087 buffer.read_with(cx, |buffer, _| {
4088 assert_eq!(buffer.text(), on_disk_text);
4089 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4090 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4091 });
4092}
4093
4094#[gpui::test(iterations = 30)]
4095async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4096 init_test(cx);
4097
4098 let fs = FakeFs::new(cx.executor());
4099 fs.insert_tree(
4100 path!("/dir"),
4101 json!({
4102 "file1": "the original contents",
4103 }),
4104 )
4105 .await;
4106
4107 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4108 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4109 let buffer = project
4110 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4111 .await
4112 .unwrap();
4113
4114 // Simulate buffer diffs being slow, so that they don't complete before
4115 // the next file change occurs.
4116 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4117
4118 // Change the buffer's file on disk, and then wait for the file change
4119 // to be detected by the worktree, so that the buffer starts reloading.
4120 fs.save(
4121 path!("/dir/file1").as_ref(),
4122 &"the first contents".into(),
4123 Default::default(),
4124 )
4125 .await
4126 .unwrap();
4127 worktree.next_event(cx).await;
4128
4129 cx.executor()
4130 .spawn(cx.executor().simulate_random_delay())
4131 .await;
4132
4133 // Perform a noop edit, causing the buffer's version to increase.
4134 buffer.update(cx, |buffer, cx| {
4135 buffer.edit([(0..0, " ")], None, cx);
4136 buffer.undo(cx);
4137 });
4138
4139 cx.executor().run_until_parked();
4140 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4141 buffer.read_with(cx, |buffer, _| {
4142 let buffer_text = buffer.text();
4143 if buffer_text == on_disk_text {
4144 assert!(
4145 !buffer.is_dirty() && !buffer.has_conflict(),
4146 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4147 );
4148 }
4149 // If the file change occurred while the buffer was processing the first
4150 // change, the buffer will be in a conflicting state.
4151 else {
4152 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4153 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4154 }
4155 });
4156}
4157
4158#[gpui::test]
4159async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4160 init_test(cx);
4161
4162 let fs = FakeFs::new(cx.executor());
4163 fs.insert_tree(
4164 path!("/dir"),
4165 json!({
4166 "file1": "the old contents",
4167 }),
4168 )
4169 .await;
4170
4171 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4172 let buffer = project
4173 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4174 .await
4175 .unwrap();
4176 buffer.update(cx, |buffer, cx| {
4177 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4178 });
4179
4180 project
4181 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4182 .await
4183 .unwrap();
4184
4185 let new_text = fs
4186 .load(Path::new(path!("/dir/file1")))
4187 .await
4188 .unwrap()
4189 .replace("\r\n", "\n");
4190 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4191}
4192
4193#[gpui::test]
4194async fn test_save_as(cx: &mut gpui::TestAppContext) {
4195 init_test(cx);
4196
4197 let fs = FakeFs::new(cx.executor());
4198 fs.insert_tree("/dir", json!({})).await;
4199
4200 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4201
4202 let languages = project.update(cx, |project, _| project.languages().clone());
4203 languages.add(rust_lang());
4204
4205 let buffer = project.update(cx, |project, cx| {
4206 project.create_local_buffer("", None, false, cx)
4207 });
4208 buffer.update(cx, |buffer, cx| {
4209 buffer.edit([(0..0, "abc")], None, cx);
4210 assert!(buffer.is_dirty());
4211 assert!(!buffer.has_conflict());
4212 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4213 });
4214 project
4215 .update(cx, |project, cx| {
4216 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4217 let path = ProjectPath {
4218 worktree_id,
4219 path: rel_path("file1.rs").into(),
4220 };
4221 project.save_buffer_as(buffer.clone(), path, cx)
4222 })
4223 .await
4224 .unwrap();
4225 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4226
4227 cx.executor().run_until_parked();
4228 buffer.update(cx, |buffer, cx| {
4229 assert_eq!(
4230 buffer.file().unwrap().full_path(cx),
4231 Path::new("dir/file1.rs")
4232 );
4233 assert!(!buffer.is_dirty());
4234 assert!(!buffer.has_conflict());
4235 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4236 });
4237
4238 let opened_buffer = project
4239 .update(cx, |project, cx| {
4240 project.open_local_buffer("/dir/file1.rs", cx)
4241 })
4242 .await
4243 .unwrap();
4244 assert_eq!(opened_buffer, buffer);
4245}
4246
4247#[gpui::test]
4248async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4249 init_test(cx);
4250
4251 let fs = FakeFs::new(cx.executor());
4252 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4253
4254 fs.insert_tree(
4255 path!("/dir"),
4256 json!({
4257 "data_a.txt": "data about a"
4258 }),
4259 )
4260 .await;
4261
4262 let buffer = project
4263 .update(cx, |project, cx| {
4264 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4265 })
4266 .await
4267 .unwrap();
4268
4269 buffer.update(cx, |buffer, cx| {
4270 buffer.edit([(11..12, "b")], None, cx);
4271 });
4272
4273 // Save buffer's contents as a new file and confirm that the buffer's now
4274 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4275 // file associated with the buffer has now been updated to `data_b.txt`
4276 project
4277 .update(cx, |project, cx| {
4278 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4279 let new_path = ProjectPath {
4280 worktree_id,
4281 path: rel_path("data_b.txt").into(),
4282 };
4283
4284 project.save_buffer_as(buffer.clone(), new_path, cx)
4285 })
4286 .await
4287 .unwrap();
4288
4289 buffer.update(cx, |buffer, cx| {
4290 assert_eq!(
4291 buffer.file().unwrap().full_path(cx),
4292 Path::new("dir/data_b.txt")
4293 )
4294 });
4295
4296 // Open the original `data_a.txt` file, confirming that its contents are
4297 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4298 let original_buffer = project
4299 .update(cx, |project, cx| {
4300 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4301 })
4302 .await
4303 .unwrap();
4304
4305 original_buffer.update(cx, |buffer, cx| {
4306 assert_eq!(buffer.text(), "data about a");
4307 assert_eq!(
4308 buffer.file().unwrap().full_path(cx),
4309 Path::new("dir/data_a.txt")
4310 )
4311 });
4312}
4313
// Mutates files underneath a real (non-fake) worktree and verifies that:
// entry ids are preserved across renames, open buffers track their files'
// new paths (or report deletion), and a remote replica converges once the
// recorded update batches are applied to it.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolve a worktree-relative path to its stable entry id, panicking
    // if the entry is missing.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update batch the local worktree emits, so they can be
    // replayed onto the remote replica below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive both in-place renames and moves across directories.
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        // Buffers for surviving files report a present disk state; the
        // buffer for the removed file5 reports deletion.
        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4481
#[gpui::test(iterations = 10)]
// Verifies that renaming a directory preserves the identity (entry ids) of the
// directory and the files inside it, and that an open buffer for one of those
// files is not marked dirty by the rename.
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: look up the worktree entry id for a path, panicking if the path
    // does not exist in the worktree snapshot.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Record entry ids before the rename so we can assert they are stable.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" through the project API.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survive the rename, and the buffer stays clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4535
4536#[gpui::test]
4537async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4538 init_test(cx);
4539
4540 let fs = FakeFs::new(cx.executor());
4541 fs.insert_tree(
4542 "/dir",
4543 json!({
4544 "a.txt": "a-contents",
4545 "b.txt": "b-contents",
4546 }),
4547 )
4548 .await;
4549
4550 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4551
4552 // Spawn multiple tasks to open paths, repeating some paths.
4553 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4554 (
4555 p.open_local_buffer("/dir/a.txt", cx),
4556 p.open_local_buffer("/dir/b.txt", cx),
4557 p.open_local_buffer("/dir/a.txt", cx),
4558 )
4559 });
4560
4561 let buffer_a_1 = buffer_a_1.await.unwrap();
4562 let buffer_a_2 = buffer_a_2.await.unwrap();
4563 let buffer_b = buffer_b.await.unwrap();
4564 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4565 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4566
4567 // There is only one buffer per path.
4568 let buffer_a_id = buffer_a_1.entity_id();
4569 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4570
4571 // Open the same path again while it is still open.
4572 drop(buffer_a_1);
4573 let buffer_a_3 = project
4574 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4575 .await
4576 .unwrap();
4577
4578 // There's still only one buffer per path.
4579 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4580}
4581
#[gpui::test]
// Verifies the buffer dirty-state machine: which edits/saves/file-deletions
// mark a buffer dirty or clean, and exactly which `BufferEvent`s are emitted
// at each transition (Operation events are filtered out everywhere).
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Shared log of non-Operation buffer events, asserted on below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Note: only the first edit after a save triggers DirtyChanged; the
        // second edit emits Edited alone.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    // Dirty the buffer first, then delete the file underneath it.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    // Only the file-handle change is reported; the buffer stays dirty.
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4763
#[gpui::test]
// Verifies how a buffer reacts to its file changing on disk: a clean buffer
// reloads (with anchors translated through the diff), while a dirty buffer
// keeps its contents and is flagged as conflicted.
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // "ˇ" markers define the offsets at which anchors are created below.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create anchors at the marked offsets so we can check that they survive
    // the reload in the right positions.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors moved to the new marked positions rather than staying
        // at their old absolute offsets.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4846
4847#[gpui::test]
4848async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4849 init_test(cx);
4850
4851 let fs = FakeFs::new(cx.executor());
4852 fs.insert_tree(
4853 path!("/dir"),
4854 json!({
4855 "file1": "a\nb\nc\n",
4856 "file2": "one\r\ntwo\r\nthree\r\n",
4857 }),
4858 )
4859 .await;
4860
4861 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4862 let buffer1 = project
4863 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4864 .await
4865 .unwrap();
4866 let buffer2 = project
4867 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4868 .await
4869 .unwrap();
4870
4871 buffer1.update(cx, |buffer, _| {
4872 assert_eq!(buffer.text(), "a\nb\nc\n");
4873 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4874 });
4875 buffer2.update(cx, |buffer, _| {
4876 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4877 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4878 });
4879
4880 // Change a file's line endings on disk from unix to windows. The buffer's
4881 // state updates correctly.
4882 fs.save(
4883 path!("/dir/file1").as_ref(),
4884 &"aaa\nb\nc\n".into(),
4885 LineEnding::Windows,
4886 )
4887 .await
4888 .unwrap();
4889 cx.executor().run_until_parked();
4890 buffer1.update(cx, |buffer, _| {
4891 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4892 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4893 });
4894
4895 // Save a file with windows line endings. The file is written correctly.
4896 buffer2.update(cx, |buffer, cx| {
4897 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4898 });
4899 project
4900 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4901 .await
4902 .unwrap();
4903 assert_eq!(
4904 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4905 "one\r\ntwo\r\nthree\r\nfour\r\n",
4906 );
4907}
4908
#[gpui::test]
// Verifies that pushed LSP diagnostics with `related_information` are grouped:
// each primary diagnostic and its hints share a group id, hints whose message
// matches a primary's related-information are marked non-primary, and
// `diagnostic_group` returns each group's entries in range order.
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload containing two logical groups:
    // "error 1" with one hint, and "error 2" with two hints. Each hint points
    // back at its primary via related_information ("original diagnostic").
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Push the diagnostics into the store as if they came from server 0.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All diagnostics in range, sorted by position: "error 2"'s group gets
    // id 0 and "error 1"'s group gets id 1.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" plus its two hints, in range order.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" plus its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5168
#[gpui::test]
// Verifies that renaming a file through the project sends the LSP
// `workspace/willRenameFiles` request (applying the returned WorkspaceEdit)
// followed by the `workspace/didRenameFiles` notification, for servers that
// register file-operation capabilities matching the renamed path.
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // The fake server registers interest in renames of *.rs files and of any
    // folder, so the "one.rs" -> "three.rs" rename below must be reported.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening the buffer starts the language server for it.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename; it cannot complete until the willRenameFiles
    // request handled below has responded.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server returns from willRenameFiles; the project should
    // apply it before performing the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit actually served, to assert the handler really ran.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5304
#[gpui::test]
// Verifies the symbol-rename flow: `prepare_rename` resolves the renameable
// range via the LSP prepareRename request, and `perform_rename` applies the
// WorkspaceEdit returned by the rename request across multiple buffers.
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // Prepare a rename at offset 7 (inside "ONE"); the fake server answers
    // with the range of the identifier.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the server returns edits to both one.rs (the
    // definition) and two.rs (two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both affected buffers with the edits
    // applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5444
#[gpui::test]
// Verifies project-wide text search: matches are found on disk, and search
// results reflect unsaved in-memory buffer edits (dirty buffers are searched
// by content, not by their on-disk state).
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive whole-word search for "TWO" over on-disk contents.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Introduce unsaved edits to four.rs that add two new "TWO" occurrences.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // Searching again picks up the dirty buffer's in-memory matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5521
#[gpui::test]
// Verifies the `files_to_include` PathMatcher of a search query: only files
// matching at least one inclusion glob are searched, and globs that match
// nothing are harmlessly ignored alongside ones that do match.
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Inclusion glob matching no files -> empty result set.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // Single glob restricts results to the matching extension.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A non-matching glob mixed in with a matching one has no effect.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs union their results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5645
#[gpui::test]
async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
    // Verifies that the `files_to_exclude` matcher of a project text search
    // filters results correctly: non-matching exclusions leave all files in,
    // matching exclusions drop the matched files, and excluding every
    // extension yields an empty result set.
    init_test(cx);

    let search_query = "file";

    // Two Rust and two TypeScript files, each containing the word "file".
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // Exclusion pattern matches nothing -> every file is searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false, // not whole-word
                true,  // case-sensitive
                false, // do not search ignored files
                Default::default(), // no inclusions
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                false, // do not match against full project paths
                None,  // no buffer restriction
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        // Expected ranges are byte offsets of "file" within each file's text.
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "If no exclusions match, all files should be returned"
    );

    // Excluding "*.rs" leaves only the TypeScript files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "Rust exclusion search should give only TypeScript files"
    );

    // A non-matching extra pattern ("*.odd") does not change the result.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
    );

    // Excluding both extensions removes every candidate file.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                Default::default(),
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local(),
                )
                .unwrap(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
    );
}
5769
5770#[gpui::test]
5771async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5772 init_test(cx);
5773
5774 let search_query = "file";
5775
5776 let fs = FakeFs::new(cx.executor());
5777 fs.insert_tree(
5778 path!("/dir"),
5779 json!({
5780 "one.rs": r#"// Rust file one"#,
5781 "one.ts": r#"// TypeScript file one"#,
5782 "two.rs": r#"// Rust file two"#,
5783 "two.ts": r#"// TypeScript file two"#,
5784 }),
5785 )
5786 .await;
5787
5788 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5789 let path_style = PathStyle::local();
5790 let _buffer = project.update(cx, |project, cx| {
5791 project.create_local_buffer("file", None, false, cx)
5792 });
5793
5794 assert_eq!(
5795 search(
5796 &project,
5797 SearchQuery::text(
5798 search_query,
5799 false,
5800 true,
5801 false,
5802 Default::default(),
5803 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5804 false,
5805 None,
5806 )
5807 .unwrap(),
5808 cx
5809 )
5810 .await
5811 .unwrap(),
5812 HashMap::from_iter([
5813 (path!("dir/one.rs").to_string(), vec![8..12]),
5814 (path!("dir/one.ts").to_string(), vec![14..18]),
5815 (path!("dir/two.rs").to_string(), vec![8..12]),
5816 (path!("dir/two.ts").to_string(), vec![14..18]),
5817 ]),
5818 "If no exclusions match, all files should be returned"
5819 );
5820
5821 assert_eq!(
5822 search(
5823 &project,
5824 SearchQuery::text(
5825 search_query,
5826 false,
5827 true,
5828 false,
5829 Default::default(),
5830 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5831 false,
5832 None,
5833 )
5834 .unwrap(),
5835 cx
5836 )
5837 .await
5838 .unwrap(),
5839 HashMap::from_iter([
5840 (path!("dir/one.ts").to_string(), vec![14..18]),
5841 (path!("dir/two.ts").to_string(), vec![14..18]),
5842 ]),
5843 "Rust exclusion search should give only TypeScript files"
5844 );
5845
5846 assert_eq!(
5847 search(
5848 &project,
5849 SearchQuery::text(
5850 search_query,
5851 false,
5852 true,
5853 false,
5854 Default::default(),
5855 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5856 false,
5857 None,
5858 )
5859 .unwrap(),
5860 cx
5861 )
5862 .await
5863 .unwrap(),
5864 HashMap::from_iter([
5865 (path!("dir/one.rs").to_string(), vec![8..12]),
5866 (path!("dir/two.rs").to_string(), vec![8..12]),
5867 ]),
5868 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5869 );
5870
5871 assert!(
5872 search(
5873 &project,
5874 SearchQuery::text(
5875 search_query,
5876 false,
5877 true,
5878 false,
5879 Default::default(),
5880 PathMatcher::new(
5881 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5882 PathStyle::local(),
5883 )
5884 .unwrap(),
5885 false,
5886 None,
5887 )
5888 .unwrap(),
5889 cx
5890 )
5891 .await
5892 .unwrap()
5893 .is_empty(),
5894 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5895 );
5896}
5897
5898#[gpui::test]
5899async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5900 init_test(cx);
5901
5902 let search_query = "file";
5903
5904 let fs = FakeFs::new(cx.executor());
5905 fs.insert_tree(
5906 path!("/dir"),
5907 json!({
5908 "one.rs": r#"// Rust file one"#,
5909 "one.ts": r#"// TypeScript file one"#,
5910 "two.rs": r#"// Rust file two"#,
5911 "two.ts": r#"// TypeScript file two"#,
5912 }),
5913 )
5914 .await;
5915 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5916 assert!(
5917 search(
5918 &project,
5919 SearchQuery::text(
5920 search_query,
5921 false,
5922 true,
5923 false,
5924 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5925 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5926 false,
5927 None,
5928 )
5929 .unwrap(),
5930 cx
5931 )
5932 .await
5933 .unwrap()
5934 .is_empty(),
5935 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5936 );
5937
5938 assert!(
5939 search(
5940 &project,
5941 SearchQuery::text(
5942 search_query,
5943 false,
5944 true,
5945 false,
5946 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5947 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5948 false,
5949 None,
5950 )
5951 .unwrap(),
5952 cx
5953 )
5954 .await
5955 .unwrap()
5956 .is_empty(),
5957 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5958 );
5959
5960 assert!(
5961 search(
5962 &project,
5963 SearchQuery::text(
5964 search_query,
5965 false,
5966 true,
5967 false,
5968 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5969 .unwrap(),
5970 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5971 .unwrap(),
5972 false,
5973 None,
5974 )
5975 .unwrap(),
5976 cx
5977 )
5978 .await
5979 .unwrap()
5980 .is_empty(),
5981 "Non-matching inclusions and exclusions should not change that."
5982 );
5983
5984 assert_eq!(
5985 search(
5986 &project,
5987 SearchQuery::text(
5988 search_query,
5989 false,
5990 true,
5991 false,
5992 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5993 .unwrap(),
5994 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
5995 .unwrap(),
5996 false,
5997 None,
5998 )
5999 .unwrap(),
6000 cx
6001 )
6002 .await
6003 .unwrap(),
6004 HashMap::from_iter([
6005 (path!("dir/one.ts").to_string(), vec![14..18]),
6006 (path!("dir/two.ts").to_string(), vec![14..18]),
6007 ]),
6008 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6009 );
6010}
6011
#[gpui::test]
async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies inclusion matching across multiple worktrees: when the
    // "match full paths" flag is set, inclusion patterns can be prefixed
    // with a worktree name to scope results to that worktree; without it,
    // patterns match within every worktree.
    init_test(cx);

    // Two worktrees with identically-named files, all containing "NEEDLE".
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/worktree-a"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;
    fs.insert_tree(
        path!("/worktree-b"),
        json!({
            "haystack.rs": r#"// NEEDLE"#,
            "haystack.ts": r#"// NEEDLE"#,
        }),
    )
    .await;

    let path_style = PathStyle::local();
    let project = Project::test(
        fs.clone(),
        [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
        cx,
    )
    .await;

    // Full-path inclusion scoped to worktree-a.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false, // not whole-word
                true,  // case-sensitive
                false, // do not search ignored files
                PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
                Default::default(), // no exclusions
                true, // match patterns against full (worktree-prefixed) paths
                None, // no buffer restriction
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );
    // Full-path inclusion scoped to worktree-b.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
                Default::default(),
                true,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
        "should only return results from included worktree"
    );

    // Without full-path matching, "*.ts" applies in both worktrees.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "NEEDLE",
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
            (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
        ]),
        "should return results from both worktrees"
    );
}
6110
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    // Verifies the `include_ignored` search flag: by default gitignored
    // directories ("target", "node_modules") are skipped; with the flag set
    // they are searched, and inclusion/exclusion matchers still apply to the
    // ignored files.
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // include_ignored = false -> only the non-ignored root package.json.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false, // not whole-word
                false, // case-insensitive
                false, // do NOT search ignored files
                Default::default(), // no inclusions
                Default::default(), // no exclusions
                false, // do not match against full project paths
                None,  // no buffer restriction
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // NOTE(review): a fresh project is created per query — presumably to
    // avoid any state carried over from the previous search; confirm.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // include_ignored = true -> matches in target/ and node_modules/ too.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Inclusion and exclusion matchers also apply to ignored files:
    // include only prettier's directory, then exclude its .ts file.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6235
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    // Verifies text search over non-ASCII (Cyrillic) content. Expected match
    // ranges are byte offsets: "привет" is 6 chars × 2 bytes = 12 bytes.
    // Also checks which query representation is chosen: a case-sensitive
    // text query stays `SearchQuery::Text`, while a case-insensitive one is
    // compiled to `SearchQuery::Regex` (per the assert_matches below).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false, // not whole-word
        true,  // case-sensitive
        false, // do not search ignored files
        Default::default(), // no inclusions
        Default::default(), // no exclusions
        false, // do not match against full project paths
        None,  // no buffer restriction
    );
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    // Case-sensitive: only the lowercase occurrences match.
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false, // case-insensitive
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive text search on non-ASCII input is lowered to a regex.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    // Case-insensitive: uppercase "ПРИВЕТ" occurrences match as well.
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The "." in the query is matched literally (range extends one byte past
    // the word), so only two.rs — which ends "ПРИВЕТ." — matches.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6318
6319#[gpui::test]
6320async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6321 init_test(cx);
6322
6323 let fs = FakeFs::new(cx.executor());
6324 fs.insert_tree(
6325 "/one/two",
6326 json!({
6327 "three": {
6328 "a.txt": "",
6329 "four": {}
6330 },
6331 "c.rs": ""
6332 }),
6333 )
6334 .await;
6335
6336 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6337 project
6338 .update(cx, |project, cx| {
6339 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6340 project.create_entry((id, rel_path("b..")), true, cx)
6341 })
6342 .await
6343 .unwrap()
6344 .into_included()
6345 .unwrap();
6346
6347 assert_eq!(
6348 fs.paths(true),
6349 vec![
6350 PathBuf::from(path!("/")),
6351 PathBuf::from(path!("/one")),
6352 PathBuf::from(path!("/one/two")),
6353 PathBuf::from(path!("/one/two/c.rs")),
6354 PathBuf::from(path!("/one/two/three")),
6355 PathBuf::from(path!("/one/two/three/a.txt")),
6356 PathBuf::from(path!("/one/two/three/b..")),
6357 PathBuf::from(path!("/one/two/three/four")),
6358 ]
6359 );
6360}
6361
6362#[gpui::test]
6363async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6364 init_test(cx);
6365
6366 let fs = FakeFs::new(cx.executor());
6367 fs.insert_tree(
6368 path!("/dir"),
6369 json!({
6370 "a.tsx": "a",
6371 }),
6372 )
6373 .await;
6374
6375 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6376
6377 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6378 language_registry.add(tsx_lang());
6379 let language_server_names = [
6380 "TypeScriptServer",
6381 "TailwindServer",
6382 "ESLintServer",
6383 "NoHoverCapabilitiesServer",
6384 ];
6385 let mut language_servers = [
6386 language_registry.register_fake_lsp(
6387 "tsx",
6388 FakeLspAdapter {
6389 name: language_server_names[0],
6390 capabilities: lsp::ServerCapabilities {
6391 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6392 ..lsp::ServerCapabilities::default()
6393 },
6394 ..FakeLspAdapter::default()
6395 },
6396 ),
6397 language_registry.register_fake_lsp(
6398 "tsx",
6399 FakeLspAdapter {
6400 name: language_server_names[1],
6401 capabilities: lsp::ServerCapabilities {
6402 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6403 ..lsp::ServerCapabilities::default()
6404 },
6405 ..FakeLspAdapter::default()
6406 },
6407 ),
6408 language_registry.register_fake_lsp(
6409 "tsx",
6410 FakeLspAdapter {
6411 name: language_server_names[2],
6412 capabilities: lsp::ServerCapabilities {
6413 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6414 ..lsp::ServerCapabilities::default()
6415 },
6416 ..FakeLspAdapter::default()
6417 },
6418 ),
6419 language_registry.register_fake_lsp(
6420 "tsx",
6421 FakeLspAdapter {
6422 name: language_server_names[3],
6423 capabilities: lsp::ServerCapabilities {
6424 hover_provider: None,
6425 ..lsp::ServerCapabilities::default()
6426 },
6427 ..FakeLspAdapter::default()
6428 },
6429 ),
6430 ];
6431
6432 let (buffer, _handle) = project
6433 .update(cx, |p, cx| {
6434 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6435 })
6436 .await
6437 .unwrap();
6438 cx.executor().run_until_parked();
6439
6440 let mut servers_with_hover_requests = HashMap::default();
6441 for i in 0..language_server_names.len() {
6442 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6443 panic!(
6444 "Failed to get language server #{i} with name {}",
6445 &language_server_names[i]
6446 )
6447 });
6448 let new_server_name = new_server.server.name();
6449 assert!(
6450 !servers_with_hover_requests.contains_key(&new_server_name),
6451 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6452 );
6453 match new_server_name.as_ref() {
6454 "TailwindServer" | "TypeScriptServer" => {
6455 servers_with_hover_requests.insert(
6456 new_server_name.clone(),
6457 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6458 move |_, _| {
6459 let name = new_server_name.clone();
6460 async move {
6461 Ok(Some(lsp::Hover {
6462 contents: lsp::HoverContents::Scalar(
6463 lsp::MarkedString::String(format!("{name} hover")),
6464 ),
6465 range: None,
6466 }))
6467 }
6468 },
6469 ),
6470 );
6471 }
6472 "ESLintServer" => {
6473 servers_with_hover_requests.insert(
6474 new_server_name,
6475 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6476 |_, _| async move { Ok(None) },
6477 ),
6478 );
6479 }
6480 "NoHoverCapabilitiesServer" => {
6481 let _never_handled = new_server
6482 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6483 panic!(
6484 "Should not call for hovers server with no corresponding capabilities"
6485 )
6486 });
6487 }
6488 unexpected => panic!("Unexpected server name: {unexpected}"),
6489 }
6490 }
6491
6492 let hover_task = project.update(cx, |project, cx| {
6493 project.hover(&buffer, Point::new(0, 0), cx)
6494 });
6495 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6496 |mut hover_request| async move {
6497 hover_request
6498 .next()
6499 .await
6500 .expect("All hover requests should have been triggered")
6501 },
6502 ))
6503 .await;
6504 assert_eq!(
6505 vec!["TailwindServer hover", "TypeScriptServer hover"],
6506 hover_task
6507 .await
6508 .into_iter()
6509 .flatten()
6510 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6511 .sorted()
6512 .collect::<Vec<_>>(),
6513 "Should receive hover responses from all related servers with hover capabilities"
6514 );
6515}
6516
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    // Verifies that hover content consisting solely of empty/whitespace-only
    // parts is filtered out entirely, producing no hover blocks.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // One fake TypeScript server that advertises hover support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server answers with three hover parts that are all effectively
    // empty (empty string, spaces, newlines).
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String("  ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Ensure the request actually reached the fake server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6590
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    // Verifies that requesting code actions with an explicit kinds filter
    // returns only actions of those kinds: the server offers both
    // "organize imports" and "fix all", but only the former is requested.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    // One fake TypeScript server that advertises code-action support.
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Ensure the request actually reached the fake server before asserting.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action should survive the kinds filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6669
6670#[gpui::test]
6671async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6672 init_test(cx);
6673
6674 let fs = FakeFs::new(cx.executor());
6675 fs.insert_tree(
6676 path!("/dir"),
6677 json!({
6678 "a.tsx": "a",
6679 }),
6680 )
6681 .await;
6682
6683 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6684
6685 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6686 language_registry.add(tsx_lang());
6687 let language_server_names = [
6688 "TypeScriptServer",
6689 "TailwindServer",
6690 "ESLintServer",
6691 "NoActionsCapabilitiesServer",
6692 ];
6693
6694 let mut language_server_rxs = [
6695 language_registry.register_fake_lsp(
6696 "tsx",
6697 FakeLspAdapter {
6698 name: language_server_names[0],
6699 capabilities: lsp::ServerCapabilities {
6700 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6701 ..lsp::ServerCapabilities::default()
6702 },
6703 ..FakeLspAdapter::default()
6704 },
6705 ),
6706 language_registry.register_fake_lsp(
6707 "tsx",
6708 FakeLspAdapter {
6709 name: language_server_names[1],
6710 capabilities: lsp::ServerCapabilities {
6711 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6712 ..lsp::ServerCapabilities::default()
6713 },
6714 ..FakeLspAdapter::default()
6715 },
6716 ),
6717 language_registry.register_fake_lsp(
6718 "tsx",
6719 FakeLspAdapter {
6720 name: language_server_names[2],
6721 capabilities: lsp::ServerCapabilities {
6722 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6723 ..lsp::ServerCapabilities::default()
6724 },
6725 ..FakeLspAdapter::default()
6726 },
6727 ),
6728 language_registry.register_fake_lsp(
6729 "tsx",
6730 FakeLspAdapter {
6731 name: language_server_names[3],
6732 capabilities: lsp::ServerCapabilities {
6733 code_action_provider: None,
6734 ..lsp::ServerCapabilities::default()
6735 },
6736 ..FakeLspAdapter::default()
6737 },
6738 ),
6739 ];
6740
6741 let (buffer, _handle) = project
6742 .update(cx, |p, cx| {
6743 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6744 })
6745 .await
6746 .unwrap();
6747 cx.executor().run_until_parked();
6748
6749 let mut servers_with_actions_requests = HashMap::default();
6750 for i in 0..language_server_names.len() {
6751 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6752 panic!(
6753 "Failed to get language server #{i} with name {}",
6754 &language_server_names[i]
6755 )
6756 });
6757 let new_server_name = new_server.server.name();
6758
6759 assert!(
6760 !servers_with_actions_requests.contains_key(&new_server_name),
6761 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6762 );
6763 match new_server_name.0.as_ref() {
6764 "TailwindServer" | "TypeScriptServer" => {
6765 servers_with_actions_requests.insert(
6766 new_server_name.clone(),
6767 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6768 move |_, _| {
6769 let name = new_server_name.clone();
6770 async move {
6771 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6772 lsp::CodeAction {
6773 title: format!("{name} code action"),
6774 ..lsp::CodeAction::default()
6775 },
6776 )]))
6777 }
6778 },
6779 ),
6780 );
6781 }
6782 "ESLintServer" => {
6783 servers_with_actions_requests.insert(
6784 new_server_name,
6785 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6786 |_, _| async move { Ok(None) },
6787 ),
6788 );
6789 }
6790 "NoActionsCapabilitiesServer" => {
6791 let _never_handled = new_server
6792 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6793 panic!(
6794 "Should not call for code actions server with no corresponding capabilities"
6795 )
6796 });
6797 }
6798 unexpected => panic!("Unexpected server name: {unexpected}"),
6799 }
6800 }
6801
6802 let code_actions_task = project.update(cx, |project, cx| {
6803 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6804 });
6805
6806 // cx.run_until_parked();
6807 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6808 |mut code_actions_request| async move {
6809 code_actions_request
6810 .next()
6811 .await
6812 .expect("All code actions requests should have been triggered")
6813 },
6814 ))
6815 .await;
6816 assert_eq!(
6817 vec!["TailwindServer code action", "TypeScriptServer code action"],
6818 code_actions_task
6819 .await
6820 .unwrap()
6821 .unwrap()
6822 .into_iter()
6823 .map(|code_action| code_action.lsp_action.title().to_owned())
6824 .sorted()
6825 .collect::<Vec<_>>(),
6826 "Should receive code actions responses from all related servers with hover capabilities"
6827 );
6828}
6829
6830#[gpui::test]
6831async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6832 init_test(cx);
6833
6834 let fs = FakeFs::new(cx.executor());
6835 fs.insert_tree(
6836 "/dir",
6837 json!({
6838 "a.rs": "let a = 1;",
6839 "b.rs": "let b = 2;",
6840 "c.rs": "let c = 2;",
6841 }),
6842 )
6843 .await;
6844
6845 let project = Project::test(
6846 fs,
6847 [
6848 "/dir/a.rs".as_ref(),
6849 "/dir/b.rs".as_ref(),
6850 "/dir/c.rs".as_ref(),
6851 ],
6852 cx,
6853 )
6854 .await;
6855
6856 // check the initial state and get the worktrees
6857 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6858 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6859 assert_eq!(worktrees.len(), 3);
6860
6861 let worktree_a = worktrees[0].read(cx);
6862 let worktree_b = worktrees[1].read(cx);
6863 let worktree_c = worktrees[2].read(cx);
6864
6865 // check they start in the right order
6866 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6867 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6868 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6869
6870 (
6871 worktrees[0].clone(),
6872 worktrees[1].clone(),
6873 worktrees[2].clone(),
6874 )
6875 });
6876
6877 // move first worktree to after the second
6878 // [a, b, c] -> [b, a, c]
6879 project
6880 .update(cx, |project, cx| {
6881 let first = worktree_a.read(cx);
6882 let second = worktree_b.read(cx);
6883 project.move_worktree(first.id(), second.id(), cx)
6884 })
6885 .expect("moving first after second");
6886
6887 // check the state after moving
6888 project.update(cx, |project, cx| {
6889 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6890 assert_eq!(worktrees.len(), 3);
6891
6892 let first = worktrees[0].read(cx);
6893 let second = worktrees[1].read(cx);
6894 let third = worktrees[2].read(cx);
6895
6896 // check they are now in the right order
6897 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6898 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6899 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6900 });
6901
6902 // move the second worktree to before the first
6903 // [b, a, c] -> [a, b, c]
6904 project
6905 .update(cx, |project, cx| {
6906 let second = worktree_a.read(cx);
6907 let first = worktree_b.read(cx);
6908 project.move_worktree(first.id(), second.id(), cx)
6909 })
6910 .expect("moving second before first");
6911
6912 // check the state after moving
6913 project.update(cx, |project, cx| {
6914 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6915 assert_eq!(worktrees.len(), 3);
6916
6917 let first = worktrees[0].read(cx);
6918 let second = worktrees[1].read(cx);
6919 let third = worktrees[2].read(cx);
6920
6921 // check they are now in the right order
6922 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6923 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6924 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6925 });
6926
6927 // move the second worktree to after the third
6928 // [a, b, c] -> [a, c, b]
6929 project
6930 .update(cx, |project, cx| {
6931 let second = worktree_b.read(cx);
6932 let third = worktree_c.read(cx);
6933 project.move_worktree(second.id(), third.id(), cx)
6934 })
6935 .expect("moving second after third");
6936
6937 // check the state after moving
6938 project.update(cx, |project, cx| {
6939 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6940 assert_eq!(worktrees.len(), 3);
6941
6942 let first = worktrees[0].read(cx);
6943 let second = worktrees[1].read(cx);
6944 let third = worktrees[2].read(cx);
6945
6946 // check they are now in the right order
6947 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6948 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6949 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6950 });
6951
6952 // move the third worktree to before the second
6953 // [a, c, b] -> [a, b, c]
6954 project
6955 .update(cx, |project, cx| {
6956 let third = worktree_c.read(cx);
6957 let second = worktree_b.read(cx);
6958 project.move_worktree(third.id(), second.id(), cx)
6959 })
6960 .expect("moving third before second");
6961
6962 // check the state after moving
6963 project.update(cx, |project, cx| {
6964 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6965 assert_eq!(worktrees.len(), 3);
6966
6967 let first = worktrees[0].read(cx);
6968 let second = worktrees[1].read(cx);
6969 let third = worktrees[2].read(cx);
6970
6971 // check they are now in the right order
6972 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6973 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6974 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6975 });
6976
6977 // move the first worktree to after the third
6978 // [a, b, c] -> [b, c, a]
6979 project
6980 .update(cx, |project, cx| {
6981 let first = worktree_a.read(cx);
6982 let third = worktree_c.read(cx);
6983 project.move_worktree(first.id(), third.id(), cx)
6984 })
6985 .expect("moving first after third");
6986
6987 // check the state after moving
6988 project.update(cx, |project, cx| {
6989 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6990 assert_eq!(worktrees.len(), 3);
6991
6992 let first = worktrees[0].read(cx);
6993 let second = worktrees[1].read(cx);
6994 let third = worktrees[2].read(cx);
6995
6996 // check they are now in the right order
6997 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6998 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6999 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7000 });
7001
7002 // move the third worktree to before the first
7003 // [b, c, a] -> [a, b, c]
7004 project
7005 .update(cx, |project, cx| {
7006 let third = worktree_a.read(cx);
7007 let first = worktree_b.read(cx);
7008 project.move_worktree(third.id(), first.id(), cx)
7009 })
7010 .expect("moving third before first");
7011
7012 // check the state after moving
7013 project.update(cx, |project, cx| {
7014 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7015 assert_eq!(worktrees.len(), 3);
7016
7017 let first = worktrees[0].read(cx);
7018 let second = worktrees[1].read(cx);
7019 let third = worktrees[2].read(cx);
7020
7021 // check they are now in the right order
7022 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7023 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7024 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7025 });
7026}
7027
7028#[gpui::test]
7029async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
7030 init_test(cx);
7031
7032 let staged_contents = r#"
7033 fn main() {
7034 println!("hello world");
7035 }
7036 "#
7037 .unindent();
7038 let file_contents = r#"
7039 // print goodbye
7040 fn main() {
7041 println!("goodbye world");
7042 }
7043 "#
7044 .unindent();
7045
7046 let fs = FakeFs::new(cx.background_executor.clone());
7047 fs.insert_tree(
7048 "/dir",
7049 json!({
7050 ".git": {},
7051 "src": {
7052 "main.rs": file_contents,
7053 }
7054 }),
7055 )
7056 .await;
7057
7058 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7059
7060 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
7061
7062 let buffer = project
7063 .update(cx, |project, cx| {
7064 project.open_local_buffer("/dir/src/main.rs", cx)
7065 })
7066 .await
7067 .unwrap();
7068 let unstaged_diff = project
7069 .update(cx, |project, cx| {
7070 project.open_unstaged_diff(buffer.clone(), cx)
7071 })
7072 .await
7073 .unwrap();
7074
7075 cx.run_until_parked();
7076 unstaged_diff.update(cx, |unstaged_diff, cx| {
7077 let snapshot = buffer.read(cx).snapshot();
7078 assert_hunks(
7079 unstaged_diff.hunks(&snapshot, cx),
7080 &snapshot,
7081 &unstaged_diff.base_text_string().unwrap(),
7082 &[
7083 (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
7084 (
7085 2..3,
7086 " println!(\"hello world\");\n",
7087 " println!(\"goodbye world\");\n",
7088 DiffHunkStatus::modified_none(),
7089 ),
7090 ],
7091 );
7092 });
7093
7094 let staged_contents = r#"
7095 // print goodbye
7096 fn main() {
7097 }
7098 "#
7099 .unindent();
7100
7101 fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);
7102
7103 cx.run_until_parked();
7104 unstaged_diff.update(cx, |unstaged_diff, cx| {
7105 let snapshot = buffer.read(cx).snapshot();
7106 assert_hunks(
7107 unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
7108 &snapshot,
7109 &unstaged_diff.base_text().text(),
7110 &[(
7111 2..3,
7112 "",
7113 " println!(\"goodbye world\");\n",
7114 DiffHunkStatus::added_none(),
7115 )],
7116 );
7117 });
7118}
7119
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    // Tests the uncommitted diff (working copy vs. HEAD): secondary
    // (staged/unstaged) hunk statuses, reaction to HEAD changes, and diffs
    // for files that are deleted on disk but still present in HEAD.
    init_test(cx);

    // Three distinct versions of the file: HEAD, the index, and the working copy.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index, but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text (the HEAD content) should be parsed with the same
    // language as the buffer itself.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        // The comment line exists only in the working copy, so it still has a
        // secondary (unstaged) hunk; the println change is also in the index,
        // so it is fully staged.
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file appears as a single deletion hunk; the deletion is not
    // yet staged, so a secondary hunk remains.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    // (by writing an index that no longer contains `deletion.rs`).
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once staged, the deletion hunk loses its secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7299
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    // Covers staging individual hunks of an uncommitted diff:
    // - the optimistic "pending" secondary statuses shown while the index
    //   write is still in flight,
    // - the `BufferDiffEvent`s emitted along the way,
    // - rollback of the optimistic state when the index write fails,
    // - multiple staging operations in flight at once.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk is unstaged.
    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's events so we can assert on the exact sequence
    // emitted by each staging operation.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The index write hasn't completed yet, so the staged hunk shows the
        // transitional `SecondaryHunkRemovalPending` status.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. Optimistically it still shows as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The rollback is announced as a diff change covering the whole file.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    // (hunks[1] is already staged, so it is skipped here.)
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7639
// NOTE(review): the explicit seeds presumably reproduce a past race between
// index writes and FS-event delivery — confirm before changing them.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    // Verifies that staged-hunk state stays consistent when filesystem events
    // for index writes are delivered late: hunks staged while earlier FS
    // events are still buffered must not be clobbered when those events
    // finally arrive.
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    // HEAD and the index start out identical, so every hunk is unstaged.
    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    // (index writes will still happen, but their FS notifications are buffered).
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7833
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Randomized test: repeatedly stage/unstage random hunks with random
    // yields in between, then check that every hunk's final secondary status
    // matches what the sequence of operations implies.
    //
    // The number of stage/unstage operations per iteration; overridable for
    // longer local runs.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines; every 5th line is modified in the buffer, yielding 6 hunks.
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // `hunks` doubles as our model of the expected state: each operation
    // below updates the local copy's `secondary_status` in lock-step.
    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so index writes, FS events, and diff
        // recalculation can interleave in different orders across seeds.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything has settled, each pending status should have resolved
    // to its corresponding final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7956
7957#[gpui::test]
7958async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7959 init_test(cx);
7960
7961 let committed_contents = r#"
7962 fn main() {
7963 println!("hello from HEAD");
7964 }
7965 "#
7966 .unindent();
7967 let file_contents = r#"
7968 fn main() {
7969 println!("hello from the working copy");
7970 }
7971 "#
7972 .unindent();
7973
7974 let fs = FakeFs::new(cx.background_executor.clone());
7975 fs.insert_tree(
7976 "/dir",
7977 json!({
7978 ".git": {},
7979 "src": {
7980 "main.rs": file_contents,
7981 }
7982 }),
7983 )
7984 .await;
7985
7986 fs.set_head_for_repo(
7987 Path::new("/dir/.git"),
7988 &[("src/main.rs", committed_contents.clone())],
7989 "deadbeef",
7990 );
7991 fs.set_index_for_repo(
7992 Path::new("/dir/.git"),
7993 &[("src/main.rs", committed_contents.clone())],
7994 );
7995
7996 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
7997
7998 let buffer = project
7999 .update(cx, |project, cx| {
8000 project.open_local_buffer("/dir/src/main.rs", cx)
8001 })
8002 .await
8003 .unwrap();
8004 let uncommitted_diff = project
8005 .update(cx, |project, cx| {
8006 project.open_uncommitted_diff(buffer.clone(), cx)
8007 })
8008 .await
8009 .unwrap();
8010
8011 cx.run_until_parked();
8012 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8013 let snapshot = buffer.read(cx).snapshot();
8014 assert_hunks(
8015 uncommitted_diff.hunks(&snapshot, cx),
8016 &snapshot,
8017 &uncommitted_diff.base_text_string().unwrap(),
8018 &[(
8019 1..2,
8020 " println!(\"hello from HEAD\");\n",
8021 " println!(\"hello from the working copy\");\n",
8022 DiffHunkStatus {
8023 kind: DiffHunkStatusKind::Modified,
8024 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8025 },
8026 )],
8027 );
8028 });
8029}
8030
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies `GitStore::repository_and_path_for_project_path` resolution:
    // - a file outside any repository resolves to `None`,
    // - a file inside a repository resolves to that repo and a repo-relative
    //   path,
    // - nested repositories ("deps/dep1/.git") take precedence for files
    //   beneath them,
    // - removing a repository's `.git` directory invalidates the mapping.
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait until all repositories have been discovered before querying.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // (project-relative path, expected (repo work dir, repo-relative path)).
        // `c.txt` lives above both repositories, so it maps to `None`.
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Delete the outer repository; files beneath `dir1` (outside the nested
    // repo) should no longer resolve to any repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8120
// Verifies that a repository rooted at the user's home directory is ignored
// when the worktree is a subdirectory of home, but is picked up when the
// worktree root is the home directory itself.
#[gpui::test]
async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let home = paths::home_dir();
    fs.insert_tree(
        home,
        json!({
            ".git": {},
            "project": {
                "a.txt": "A"
            },
        }),
    )
    .await;

    // Open only `~/project`: the repo at `~` should NOT be associated.
    let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
        assert!(containing.is_none());
    });

    // Open `~` itself: now the home-dir repository should contain the file.
    let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    tree.flush_fs_events(cx).await;

    project.read_with(cx, |project, cx| {
        let containing = project
            .git_store()
            .read(cx)
            .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
        assert_eq!(
            containing
                .unwrap()
                .0
                .read(cx)
                .work_directory_abs_path
                .as_ref(),
            home,
        );
    });
}
8178
// End-to-end check of cached git statuses against a real repository:
// initial scan, working-copy edits, commits, and deletions of both
// tracked and untracked files.
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // RealFs + real git processes below: blocking I/O is expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously-unchanged tracked file; it should now show as modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the current state (and drop `d.txt` from the index), then delete
    // one tracked and one untracked file from the working copy.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8308
// Checks two pieces of status post-processing: nested repositories are
// excluded from the outer repository's statuses, and an index-deleted but
// still-present file is reported with a combined deleted/added status.
#[gpui::test]
async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git repositories on disk require blocking I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "sub": {},
            "a.txt": "",
        },
    }));

    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    // a.txt exists in HEAD and the working copy but is deleted in the index.
    git_add("a.txt", &repo);
    git_commit("Initial commit", &repo);
    git_remove_index("a.txt".as_ref(), &repo);
    // `sub` is a nested git repository.
    let _sub = git_init(&work_dir.join("sub"));

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Pick the outer repository (the nested `sub` repo is also discovered).
    let repository = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
            .unwrap()
            .clone()
    });

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // `sub` doesn't appear in our computed statuses.
        // a.txt appears with a combined `DA` status.
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: TrackedStatus {
                    index_status: StatusCode::Deleted,
                    worktree_status: StatusCode::Added
                }
                .into(),
            }]
        )
    });
}
8371
// Opens a worktree rooted at a subfolder of a repository and verifies that
// statuses are still resolved against the repository root, and that clearing
// the fake repo's statuses is observed.
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Repo-relative paths for the two files inside the opened subfolder.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // The worktree root is two levels below the repository root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the fake repo's status list should clear the cached statuses too.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8451
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Currently compiled out via `#[cfg(any())]`. It verifies that a conflicted
// cherry-pick populates `merge_conflicts`, and that resolving it (add +
// commit + removing CHERRY_PICK_HEAD) clears the conflicts again.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git repository on disk requires blocking I/O.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create conflicting edits of a.txt on two branches, then cherry-pick
    // one onto the other to produce a conflict.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8534
// Verifies that rewriting `.gitignore` flips which files are treated as
// ignored, and that a newly non-ignored file picks up its staged status.
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index agree with the working copy, so nothing is modified.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness has flipped: a.xml is now ignored, b.txt shows as Added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8602
// NOTE:
// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
// a directory which some program has already open.
// This is a limitation of the Windows.
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// Renames a repository's work directory on disk and checks that the tracked
// repository follows the rename while preserving per-file statuses.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git repository on disk requires blocking I/O.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` is left untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The same repository entity should now point at the new path, with
    // statuses intact.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8684
// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
// you can't rename a directory which some program has already open. This is a
// limitation of the Windows. See:
// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
//
// Exercises file-status tracking through a long sequence of git and FS
// operations: edits, commits, resets, stashes, gitignore changes, and
// directory renames.
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Real git repository on disk requires blocking I/O.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules, then commit the new
    // `.gitignore`.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    // Create an untracked file inside a new nested directory.
    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Rename the outer directory; the untracked status should follow the file
    // to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8913
8914#[gpui::test]
8915#[ignore]
8916async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8917 init_test(cx);
8918 cx.executor().allow_parking();
8919
8920 const IGNORE_RULE: &str = "**/target";
8921
8922 let root = TempTree::new(json!({
8923 "project": {
8924 "src": {
8925 "main.rs": "fn main() {}"
8926 },
8927 "target": {
8928 "debug": {
8929 "important_text.txt": "important text",
8930 },
8931 },
8932 ".gitignore": IGNORE_RULE
8933 },
8934
8935 }));
8936 let root_path = root.path();
8937
8938 // Set up git repository before creating the worktree.
8939 let work_dir = root.path().join("project");
8940 let repo = git_init(work_dir.as_path());
8941 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8942 git_add("src/main.rs", &repo);
8943 git_add(".gitignore", &repo);
8944 git_commit("Initial commit", &repo);
8945
8946 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8947 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8948 let project_events = Arc::new(Mutex::new(Vec::new()));
8949 project.update(cx, |project, cx| {
8950 let repo_events = repository_updates.clone();
8951 cx.subscribe(project.git_store(), move |_, _, e, _| {
8952 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8953 repo_events.lock().push(e.clone());
8954 }
8955 })
8956 .detach();
8957 let project_events = project_events.clone();
8958 cx.subscribe_self(move |_, e, _| {
8959 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8960 project_events.lock().extend(
8961 updates
8962 .iter()
8963 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
8964 .filter(|(path, _)| path != "fs-event-sentinel"),
8965 );
8966 }
8967 })
8968 .detach();
8969 });
8970
8971 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8972 tree.flush_fs_events(cx).await;
8973 tree.update(cx, |tree, cx| {
8974 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
8975 })
8976 .await
8977 .unwrap();
8978 tree.update(cx, |tree, _| {
8979 assert_eq!(
8980 tree.entries(true, 0)
8981 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8982 .collect::<Vec<_>>(),
8983 vec![
8984 (rel_path(""), false),
8985 (rel_path("project/"), false),
8986 (rel_path("project/.gitignore"), false),
8987 (rel_path("project/src"), false),
8988 (rel_path("project/src/main.rs"), false),
8989 (rel_path("project/target"), true),
8990 (rel_path("project/target/debug"), true),
8991 (rel_path("project/target/debug/important_text.txt"), true),
8992 ]
8993 );
8994 });
8995
8996 assert_eq!(
8997 repository_updates.lock().drain(..).collect::<Vec<_>>(),
8998 vec![
8999 RepositoryEvent::StatusesChanged { full_scan: true },
9000 RepositoryEvent::MergeHeadsChanged,
9001 ],
9002 "Initial worktree scan should produce a repo update event"
9003 );
9004 assert_eq!(
9005 project_events.lock().drain(..).collect::<Vec<_>>(),
9006 vec![
9007 ("project/target".to_string(), PathChange::Loaded),
9008 ("project/target/debug".to_string(), PathChange::Loaded),
9009 (
9010 "project/target/debug/important_text.txt".to_string(),
9011 PathChange::Loaded
9012 ),
9013 ],
9014 "Initial project changes should show that all not-ignored and all opened files are loaded"
9015 );
9016
9017 let deps_dir = work_dir.join("target").join("debug").join("deps");
9018 std::fs::create_dir_all(&deps_dir).unwrap();
9019 tree.flush_fs_events(cx).await;
9020 project
9021 .update(cx, |project, cx| project.git_scans_complete(cx))
9022 .await;
9023 cx.executor().run_until_parked();
9024 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9025 tree.flush_fs_events(cx).await;
9026 project
9027 .update(cx, |project, cx| project.git_scans_complete(cx))
9028 .await;
9029 cx.executor().run_until_parked();
9030 std::fs::remove_dir_all(&deps_dir).unwrap();
9031 tree.flush_fs_events(cx).await;
9032 project
9033 .update(cx, |project, cx| project.git_scans_complete(cx))
9034 .await;
9035 cx.executor().run_until_parked();
9036
9037 tree.update(cx, |tree, _| {
9038 assert_eq!(
9039 tree.entries(true, 0)
9040 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9041 .collect::<Vec<_>>(),
9042 vec![
9043 (rel_path(""), false),
9044 (rel_path("project/"), false),
9045 (rel_path("project/.gitignore"), false),
9046 (rel_path("project/src"), false),
9047 (rel_path("project/src/main.rs"), false),
9048 (rel_path("project/target"), true),
9049 (rel_path("project/target/debug"), true),
9050 (rel_path("project/target/debug/important_text.txt"), true),
9051 ],
9052 "No stray temp files should be left after the flycheck changes"
9053 );
9054 });
9055
9056 assert_eq!(
9057 repository_updates
9058 .lock()
9059 .iter()
9060 .cloned()
9061 .collect::<Vec<_>>(),
9062 Vec::new(),
9063 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9064 );
9065 assert_eq!(
9066 project_events.lock().as_slice(),
9067 vec![
9068 ("project/target/debug/deps".to_string(), PathChange::Added),
9069 ("project/target/debug/deps".to_string(), PathChange::Removed),
9070 ],
9071 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9072 No updates for more nested directories should happen as those are ignored",
9073 );
9074}
9075
9076#[gpui::test]
9077async fn test_odd_events_for_ignored_dirs(
9078 executor: BackgroundExecutor,
9079 cx: &mut gpui::TestAppContext,
9080) {
9081 init_test(cx);
9082 let fs = FakeFs::new(executor);
9083 fs.insert_tree(
9084 path!("/root"),
9085 json!({
9086 ".git": {},
9087 ".gitignore": "**/target/",
9088 "src": {
9089 "main.rs": "fn main() {}",
9090 },
9091 "target": {
9092 "debug": {
9093 "foo.txt": "foo",
9094 "deps": {}
9095 }
9096 }
9097 }),
9098 )
9099 .await;
9100 fs.set_head_and_index_for_repo(
9101 path!("/root/.git").as_ref(),
9102 &[
9103 (".gitignore", "**/target/".into()),
9104 ("src/main.rs", "fn main() {}".into()),
9105 ],
9106 );
9107
9108 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9109 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9110 let project_events = Arc::new(Mutex::new(Vec::new()));
9111 project.update(cx, |project, cx| {
9112 let repository_updates = repository_updates.clone();
9113 cx.subscribe(project.git_store(), move |_, _, e, _| {
9114 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9115 repository_updates.lock().push(e.clone());
9116 }
9117 })
9118 .detach();
9119 let project_events = project_events.clone();
9120 cx.subscribe_self(move |_, e, _| {
9121 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9122 project_events.lock().extend(
9123 updates
9124 .iter()
9125 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9126 .filter(|(path, _)| path != "fs-event-sentinel"),
9127 );
9128 }
9129 })
9130 .detach();
9131 });
9132
9133 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9134 tree.update(cx, |tree, cx| {
9135 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9136 })
9137 .await
9138 .unwrap();
9139 tree.flush_fs_events(cx).await;
9140 project
9141 .update(cx, |project, cx| project.git_scans_complete(cx))
9142 .await;
9143 cx.run_until_parked();
9144 tree.update(cx, |tree, _| {
9145 assert_eq!(
9146 tree.entries(true, 0)
9147 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9148 .collect::<Vec<_>>(),
9149 vec![
9150 (rel_path(""), false),
9151 (rel_path(".gitignore"), false),
9152 (rel_path("src"), false),
9153 (rel_path("src/main.rs"), false),
9154 (rel_path("target"), true),
9155 (rel_path("target/debug"), true),
9156 (rel_path("target/debug/deps"), true),
9157 (rel_path("target/debug/foo.txt"), true),
9158 ]
9159 );
9160 });
9161
9162 assert_eq!(
9163 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9164 vec![
9165 RepositoryEvent::MergeHeadsChanged,
9166 RepositoryEvent::BranchChanged,
9167 RepositoryEvent::StatusesChanged { full_scan: false },
9168 RepositoryEvent::StatusesChanged { full_scan: false },
9169 ],
9170 "Initial worktree scan should produce a repo update event"
9171 );
9172 assert_eq!(
9173 project_events.lock().drain(..).collect::<Vec<_>>(),
9174 vec![
9175 ("target".to_string(), PathChange::Loaded),
9176 ("target/debug".to_string(), PathChange::Loaded),
9177 ("target/debug/deps".to_string(), PathChange::Loaded),
9178 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9179 ],
9180 "All non-ignored entries and all opened firs should be getting a project event",
9181 );
9182
9183 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9184 // This may happen multiple times during a single flycheck, but once is enough for testing.
9185 fs.emit_fs_event("/root/target/debug/deps", None);
9186 tree.flush_fs_events(cx).await;
9187 project
9188 .update(cx, |project, cx| project.git_scans_complete(cx))
9189 .await;
9190 cx.executor().run_until_parked();
9191
9192 assert_eq!(
9193 repository_updates
9194 .lock()
9195 .iter()
9196 .cloned()
9197 .collect::<Vec<_>>(),
9198 Vec::new(),
9199 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9200 );
9201 assert_eq!(
9202 project_events.lock().as_slice(),
9203 Vec::new(),
9204 "No further project events should happen, as only ignored dirs received FS events",
9205 );
9206}
9207
// Repositories discovered through *invisible* worktrees (e.g. the single-file
// worktree created when a file outside the project is opened) must not be
// added to the project's repository list.
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Two nested repositories: /root/dir1 and /root/dir1/dep1. Only dep1 is
    // opened as a visible worktree below.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Initially only the repository of the visible worktree is known.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Create an invisible (non-project) worktree pointing at a file that lives
    // inside the *outer* repository...
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // ...and verify the outer repository still does not appear in the project.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9269
// Verifies gitignore handling across rescans: clean tracked files have no
// status, newly indexed files show up as `Added`, and files covered by an
// ancestor `.gitignore` or the in-repo `.gitignore` are marked ignored and get
// no git status at all.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so that even `.git` shows up as a worktree
    // entry (checked at the end of the test).
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer `.gitignore` lives *above* the repository root, exercising
    // ancestor-ignore handling.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force-load the ignored directory so its entries exist in the worktree
    // snapshot (ignored dirs are not expanded by the initial scan).
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file clean, ancestor-ignored file untracked but
    // not flagged (it sits in a tracked dir), in-repo-ignored file flagged.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create new files of each category, and stage the tracked one.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After the rescan: the staged file is Added, the others mirror their
    // pre-existing counterparts.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // `.git` itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9410
// Verifies that linked git worktrees (`.git` *file* containing a `gitdir:`
// pointer into `<repo>/.git/worktrees/...`) and submodules (`gitdir:` pointer
// into `<repo>/.git/modules/...`) are each detected as their own repository,
// and that git events against them refresh statuses correctly.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three work directories should be discovered as separate repos.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // outer project repository.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // On-disk "B" differs from HEAD/index "b", so the file reads as modified.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9566
// Two sibling worktrees living inside the same repository must resolve to a
// single deduplicated repository entry in the project.
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both worktrees map to the same repository; only one entry is expected.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9613
9614async fn search(
9615 project: &Entity<Project>,
9616 query: SearchQuery,
9617 cx: &mut gpui::TestAppContext,
9618) -> Result<HashMap<String, Vec<Range<usize>>>> {
9619 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9620 let mut results = HashMap::default();
9621 while let Ok(search_result) = search_rx.recv().await {
9622 match search_result {
9623 SearchResult::Buffer { buffer, ranges } => {
9624 results.entry(buffer).or_insert(ranges);
9625 }
9626 SearchResult::LimitReached => {}
9627 }
9628 }
9629 Ok(results
9630 .into_iter()
9631 .map(|(buffer, ranges)| {
9632 buffer.update(cx, |buffer, cx| {
9633 let path = buffer
9634 .file()
9635 .unwrap()
9636 .full_path(cx)
9637 .to_string_lossy()
9638 .to_string();
9639 let ranges = ranges
9640 .into_iter()
9641 .map(|range| range.to_offset(buffer))
9642 .collect::<Vec<_>>();
9643 (path, ranges)
9644 })
9645 })
9646 .collect())
9647}
9648
9649pub fn init_test(cx: &mut gpui::TestAppContext) {
9650 zlog::init_test();
9651
9652 cx.update(|cx| {
9653 let settings_store = SettingsStore::test(cx);
9654 cx.set_global(settings_store);
9655 release_channel::init(SemanticVersion::default(), cx);
9656 language::init(cx);
9657 Project::init_settings(cx);
9658 });
9659}
9660
9661fn json_lang() -> Arc<Language> {
9662 Arc::new(Language::new(
9663 LanguageConfig {
9664 name: "JSON".into(),
9665 matcher: LanguageMatcher {
9666 path_suffixes: vec!["json".to_string()],
9667 ..Default::default()
9668 },
9669 ..Default::default()
9670 },
9671 None,
9672 ))
9673}
9674
9675fn js_lang() -> Arc<Language> {
9676 Arc::new(Language::new(
9677 LanguageConfig {
9678 name: "JavaScript".into(),
9679 matcher: LanguageMatcher {
9680 path_suffixes: vec!["js".to_string()],
9681 ..Default::default()
9682 },
9683 ..Default::default()
9684 },
9685 None,
9686 ))
9687}
9688
9689fn rust_lang() -> Arc<Language> {
9690 Arc::new(Language::new(
9691 LanguageConfig {
9692 name: "Rust".into(),
9693 matcher: LanguageMatcher {
9694 path_suffixes: vec!["rs".to_string()],
9695 ..Default::default()
9696 },
9697 ..Default::default()
9698 },
9699 Some(tree_sitter_rust::LANGUAGE.into()),
9700 ))
9701}
9702
/// Builds a fake "Python" language (no grammar) whose toolchain lister reports
/// a `Toolchain` for every `.venv` directory found on the ancestor chain of the
/// queried path, using the provided fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only lister: no real interpreter discovery, just `.venv` probing.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report a toolchain for each `.venv` directory that exists in any
            // ancestor of `subroot_relative_path` (including the path itself).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unsupported in this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed for the fake environment.
        fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9772
9773fn typescript_lang() -> Arc<Language> {
9774 Arc::new(Language::new(
9775 LanguageConfig {
9776 name: "TypeScript".into(),
9777 matcher: LanguageMatcher {
9778 path_suffixes: vec!["ts".to_string()],
9779 ..Default::default()
9780 },
9781 ..Default::default()
9782 },
9783 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9784 ))
9785}
9786
9787fn tsx_lang() -> Arc<Language> {
9788 Arc::new(Language::new(
9789 LanguageConfig {
9790 name: "tsx".into(),
9791 matcher: LanguageMatcher {
9792 path_suffixes: vec!["tsx".to_string()],
9793 ..Default::default()
9794 },
9795 ..Default::default()
9796 },
9797 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9798 ))
9799}
9800
9801fn get_all_tasks(
9802 project: &Entity<Project>,
9803 task_contexts: Arc<TaskContexts>,
9804 cx: &mut App,
9805) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9806 let new_tasks = project.update(cx, |project, cx| {
9807 project.task_store.update(cx, |task_store, cx| {
9808 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9809 this.used_and_current_resolved_tasks(task_contexts, cx)
9810 })
9811 })
9812 });
9813
9814 cx.background_spawn(async move {
9815 let (mut old, new) = new_tasks.await;
9816 old.extend(new);
9817 old
9818 })
9819}
9820
9821#[track_caller]
9822fn assert_entry_git_state(
9823 tree: &Worktree,
9824 repository: &Repository,
9825 path: &str,
9826 index_status: Option<StatusCode>,
9827 is_ignored: bool,
9828) {
9829 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9830 let entry = tree
9831 .entry_for_path(&rel_path(path))
9832 .unwrap_or_else(|| panic!("entry {path} not found"));
9833 let status = repository
9834 .status_for_path(&repo_path(path))
9835 .map(|entry| entry.status);
9836 let expected = index_status.map(|index_status| {
9837 TrackedStatus {
9838 index_status,
9839 worktree_status: StatusCode::Unmodified,
9840 }
9841 .into()
9842 });
9843 assert_eq!(
9844 status, expected,
9845 "expected {path} to have git status: {expected:?}"
9846 );
9847 assert_eq!(
9848 entry.is_ignored, is_ignored,
9849 "expected {path} to have is_ignored: {is_ignored}"
9850 );
9851}
9852
9853#[track_caller]
9854fn git_init(path: &Path) -> git2::Repository {
9855 let mut init_opts = RepositoryInitOptions::new();
9856 init_opts.initial_head("main");
9857 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9858}
9859
9860#[track_caller]
9861fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9862 let path = path.as_ref();
9863 let mut index = repo.index().expect("Failed to get index");
9864 index.add_path(path).expect("Failed to add file");
9865 index.write().expect("Failed to write index");
9866}
9867
9868#[track_caller]
9869fn git_remove_index(path: &Path, repo: &git2::Repository) {
9870 let mut index = repo.index().expect("Failed to get index");
9871 index.remove_path(path).expect("Failed to add file");
9872 index.write().expect("Failed to write index");
9873}
9874
9875#[track_caller]
9876fn git_commit(msg: &'static str, repo: &git2::Repository) {
9877 use git2::Signature;
9878
9879 let signature = Signature::now("test", "test@zed.dev").unwrap();
9880 let oid = repo.index().unwrap().write_tree().unwrap();
9881 let tree = repo.find_tree(oid).unwrap();
9882 if let Ok(head) = repo.head() {
9883 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9884
9885 let parent_commit = parent_obj.as_commit().unwrap();
9886
9887 repo.commit(
9888 Some("HEAD"),
9889 &signature,
9890 &signature,
9891 msg,
9892 &tree,
9893 &[parent_commit],
9894 )
9895 .expect("Failed to commit with parent");
9896 } else {
9897 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9898 .expect("Failed to commit");
9899 }
9900}
9901
// Compiled out (`cfg(any())` is always false); kept around for ad-hoc use when
// debugging git-related tests.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9907
9908#[track_caller]
9909fn git_stash(repo: &mut git2::Repository) {
9910 use git2::Signature;
9911
9912 let signature = Signature::now("test", "test@zed.dev").unwrap();
9913 repo.stash_save(&signature, "N/A", None)
9914 .expect("Failed to stash");
9915}
9916
9917#[track_caller]
9918fn git_reset(offset: usize, repo: &git2::Repository) {
9919 let head = repo.head().expect("Couldn't get repo head");
9920 let object = head.peel(git2::ObjectType::Commit).unwrap();
9921 let commit = object.as_commit().unwrap();
9922 let new_head = commit
9923 .parents()
9924 .inspect(|parnet| {
9925 parnet.message();
9926 })
9927 .nth(offset)
9928 .expect("Not enough history");
9929 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9930 .expect("Could not reset");
9931}
9932
// Compiled out (`cfg(any())` is always false); kept around for ad-hoc use when
// debugging git-related tests.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    // Create branch `name` pointing at the commit HEAD currently resolves to.
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // This creates a branch; the previous "Failed to commit" message was a
    // copy-paste leftover.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9943
// Compiled out (`cfg(any())` is always false); kept around for ad-hoc use when
// debugging git-related tests. Points HEAD at `name`, then syncs the working
// tree to it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9950
// Compiled out (`cfg(any())` is always false); kept around for ad-hoc use when
// debugging git-related tests.
/// Snapshot of `git status` as a map from repo-relative path to status flags.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    let statuses = repo.statuses(None).unwrap();
    let mut by_path = collections::HashMap::default();
    for entry in statuses.iter() {
        by_path.insert(entry.path().unwrap().to_string(), entry.status());
    }
    by_path
}
9960
// `Project::find_project_path` must resolve absolute paths to the correct
// (worktree, relative path) pair — including paths that don't exist yet but
// fall inside a worktree — and return `None` for paths outside all worktrees.
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Open both directories as separate worktrees of one project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // A file at the root of the first worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // A nested file resolves to a multi-component relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // A file in the second worktree resolves against that worktree's id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A nonexistent path *inside* a worktree still resolves (e.g. for
        // creating new files).
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}