1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
97// NOTE:
98// While POSIX symbolic links are somewhat supported on Windows, they are an opt in by the user, and thus
99// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    // Verifies that a worktree opened through a symlinked root, containing a
    // further symlinked subdirectory, scans correctly on a real filesystem.
    init_test(cx);
    cx.executor().allow_parking();

    // Real on-disk layout; the project is opened via a symlink to "root".
    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // "root_link" -> "root", and "root/finnochio" -> "root/fennel".
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Use the real fs (not FakeFs) so actual symlink resolution is exercised.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // Five files: apple, date, endive, and grape reached both through
        // "fennel" and through the "finnochio" symlink.
        assert_eq!(tree.file_count(), 5);
        // Both paths must resolve to the same underlying inode.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
147
148#[gpui::test]
149async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
150 init_test(cx);
151
152 let dir = TempTree::new(json!({
153 ".editorconfig": r#"
154 root = true
155 [*.rs]
156 indent_style = tab
157 indent_size = 3
158 end_of_line = lf
159 insert_final_newline = true
160 trim_trailing_whitespace = true
161 max_line_length = 120
162 [*.js]
163 tab_width = 10
164 max_line_length = off
165 "#,
166 ".zed": {
167 "settings.json": r#"{
168 "tab_size": 8,
169 "hard_tabs": false,
170 "ensure_final_newline_on_save": false,
171 "remove_trailing_whitespace_on_save": false,
172 "preferred_line_length": 64,
173 "soft_wrap": "editor_width",
174 }"#,
175 },
176 "a.rs": "fn a() {\n A\n}",
177 "b": {
178 ".editorconfig": r#"
179 [*.rs]
180 indent_size = 2
181 max_line_length = off,
182 "#,
183 "b.rs": "fn b() {\n B\n}",
184 },
185 "c.js": "def c\n C\nend",
186 "README.json": "tabs are better\n",
187 }));
188
189 let path = dir.path();
190 let fs = FakeFs::new(cx.executor());
191 fs.insert_tree_from_real_fs(path, path).await;
192 let project = Project::test(fs, [path], cx).await;
193
194 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
195 language_registry.add(js_lang());
196 language_registry.add(json_lang());
197 language_registry.add(rust_lang());
198
199 let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
200
201 cx.executor().run_until_parked();
202
203 cx.update(|cx| {
204 let tree = worktree.read(cx);
205 let settings_for = |path: &str| {
206 let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
207 let file = File::for_entry(file_entry, worktree.clone());
208 let file_language = project
209 .read(cx)
210 .languages()
211 .load_language_for_file_path(file.path.as_std_path());
212 let file_language = cx
213 .background_executor()
214 .block(file_language)
215 .expect("Failed to get file language");
216 let file = file as _;
217 language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
218 };
219
220 let settings_a = settings_for("a.rs");
221 let settings_b = settings_for("b/b.rs");
222 let settings_c = settings_for("c.js");
223 let settings_readme = settings_for("README.json");
224
225 // .editorconfig overrides .zed/settings
226 assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
227 assert_eq!(settings_a.hard_tabs, true);
228 assert_eq!(settings_a.ensure_final_newline_on_save, true);
229 assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
230 assert_eq!(settings_a.preferred_line_length, 120);
231
232 // .editorconfig in b/ overrides .editorconfig in root
233 assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));
234
235 // "indent_size" is not set, so "tab_width" is used
236 assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));
237
238 // When max_line_length is "off", default to .zed/settings.json
239 assert_eq!(settings_b.preferred_line_length, 64);
240 assert_eq!(settings_c.preferred_line_length, 64);
241
242 // README.md should not be affected by .editorconfig's globe "*.rs"
243 assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
244 });
245}
246
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    // Verifies that a custom git hosting provider declared in the project's
    // `.zed/settings.json` is registered in the global
    // `GitHostingProviderRegistry`, and deregistered when the setting is
    // removed from the file.
    init_test(cx);
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare one extra provider, named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // After settings load, "foo" should be present in the global registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Overwrite the settings file with an empty object, removing the entry.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The provider should have been removed from the registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
311
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    // Covers per-directory `.zed` configuration: settings resolve to the
    // nearest `.zed/settings.json`, and tasks from nested `.zed/tasks.json`
    // files plus the global tasks file are merged and ordered.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against the active worktree context only.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Source kind for tasks defined in the worktree root's `.zed` directory.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings come from the nearest ancestor `.zed/settings.json`:
            // a/a.rs sees the root settings, b/b.rs sees b/.zed's settings.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Tasks from both `.zed` directories are discovered.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the root `.zed` task as most recently scheduled, then add a task
    // through the global (file-based) tasks source.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    // The recently-scheduled task now sorts first; the global task is last.
    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
512
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    // A task whose command references $ZED_WORKTREE_ROOT only resolves when
    // some context supplies that variable: an active-item context alone
    // yields nothing, but a worktree context carrying
    // `VariableName::WorktreeRoot` resolves it.
    init_test(cx);
    TaskStore::init(None);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Only an active-item context, no worktree context: no resolvable tasks.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context providing WorktreeRoot = "/dir", the task
    // resolves and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
604
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Two Python subprojects (each rooted by a pyproject.toml) initially
    // share one language server instance; selecting a different toolchain
    // for one subproject should spawn a second, separate server.

    // Manifest provider that roots a project at the nearest ancestor
    // directory containing a pyproject.toml.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            // Walk up at most `depth` ancestors looking for pyproject.toml.
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py":"",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    language_registry.add(python_lang(fs.clone()));
    // Opening a buffer in project-a starts the first "ty" server.
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance.
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    // Discover toolchains for project-b; it should be rooted at "project-b"
    // and offer exactly one toolchain (the .venv).
    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    // No toolchain is active until one is explicitly selected.
    assert!(currently_active_toolchain.is_none());
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // After activating a toolchain for project-b, its buffer should be served
    // by a fresh server instance with a new id.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
806
807#[gpui::test]
808async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
809 init_test(cx);
810
811 let fs = FakeFs::new(cx.executor());
812 fs.insert_tree(
813 path!("/dir"),
814 json!({
815 "test.rs": "const A: i32 = 1;",
816 "test2.rs": "",
817 "Cargo.toml": "a = 1",
818 "package.json": "{\"a\": 1}",
819 }),
820 )
821 .await;
822
823 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
824 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
825
826 let mut fake_rust_servers = language_registry.register_fake_lsp(
827 "Rust",
828 FakeLspAdapter {
829 name: "the-rust-language-server",
830 capabilities: lsp::ServerCapabilities {
831 completion_provider: Some(lsp::CompletionOptions {
832 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
833 ..Default::default()
834 }),
835 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
836 lsp::TextDocumentSyncOptions {
837 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
838 ..Default::default()
839 },
840 )),
841 ..Default::default()
842 },
843 ..Default::default()
844 },
845 );
846 let mut fake_json_servers = language_registry.register_fake_lsp(
847 "JSON",
848 FakeLspAdapter {
849 name: "the-json-language-server",
850 capabilities: lsp::ServerCapabilities {
851 completion_provider: Some(lsp::CompletionOptions {
852 trigger_characters: Some(vec![":".to_string()]),
853 ..Default::default()
854 }),
855 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
856 lsp::TextDocumentSyncOptions {
857 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
858 ..Default::default()
859 },
860 )),
861 ..Default::default()
862 },
863 ..Default::default()
864 },
865 );
866
867 // Open a buffer without an associated language server.
868 let (toml_buffer, _handle) = project
869 .update(cx, |project, cx| {
870 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
871 })
872 .await
873 .unwrap();
874
875 // Open a buffer with an associated language server before the language for it has been loaded.
876 let (rust_buffer, _handle2) = project
877 .update(cx, |project, cx| {
878 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
879 })
880 .await
881 .unwrap();
882 rust_buffer.update(cx, |buffer, _| {
883 assert_eq!(buffer.language().map(|l| l.name()), None);
884 });
885
886 // Now we add the languages to the project, and ensure they get assigned to all
887 // the relevant open buffers.
888 language_registry.add(json_lang());
889 language_registry.add(rust_lang());
890 cx.executor().run_until_parked();
891 rust_buffer.update(cx, |buffer, _| {
892 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
893 });
894
895 // A server is started up, and it is notified about Rust files.
896 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
897 assert_eq!(
898 fake_rust_server
899 .receive_notification::<lsp::notification::DidOpenTextDocument>()
900 .await
901 .text_document,
902 lsp::TextDocumentItem {
903 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
904 version: 0,
905 text: "const A: i32 = 1;".to_string(),
906 language_id: "rust".to_string(),
907 }
908 );
909
910 // The buffer is configured based on the language server's capabilities.
911 rust_buffer.update(cx, |buffer, _| {
912 assert_eq!(
913 buffer
914 .completion_triggers()
915 .iter()
916 .cloned()
917 .collect::<Vec<_>>(),
918 &[".".to_string(), "::".to_string()]
919 );
920 });
921 toml_buffer.update(cx, |buffer, _| {
922 assert!(buffer.completion_triggers().is_empty());
923 });
924
925 // Edit a buffer. The changes are reported to the language server.
926 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
927 assert_eq!(
928 fake_rust_server
929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
930 .await
931 .text_document,
932 lsp::VersionedTextDocumentIdentifier::new(
933 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
934 1
935 )
936 );
937
938 // Open a third buffer with a different associated language server.
939 let (json_buffer, _json_handle) = project
940 .update(cx, |project, cx| {
941 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
942 })
943 .await
944 .unwrap();
945
946 // A json language server is started up and is only notified about the json buffer.
947 let mut fake_json_server = fake_json_servers.next().await.unwrap();
948 assert_eq!(
949 fake_json_server
950 .receive_notification::<lsp::notification::DidOpenTextDocument>()
951 .await
952 .text_document,
953 lsp::TextDocumentItem {
954 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
955 version: 0,
956 text: "{\"a\": 1}".to_string(),
957 language_id: "json".to_string(),
958 }
959 );
960
961 // This buffer is configured based on the second language server's
962 // capabilities.
963 json_buffer.update(cx, |buffer, _| {
964 assert_eq!(
965 buffer
966 .completion_triggers()
967 .iter()
968 .cloned()
969 .collect::<Vec<_>>(),
970 &[":".to_string()]
971 );
972 });
973
974 // When opening another buffer whose language server is already running,
975 // it is also configured based on the existing language server's capabilities.
976 let (rust_buffer2, _handle4) = project
977 .update(cx, |project, cx| {
978 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
979 })
980 .await
981 .unwrap();
982 rust_buffer2.update(cx, |buffer, _| {
983 assert_eq!(
984 buffer
985 .completion_triggers()
986 .iter()
987 .cloned()
988 .collect::<Vec<_>>(),
989 &[".".to_string(), "::".to_string()]
990 );
991 });
992
993 // Changes are reported only to servers matching the buffer's language.
994 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
995 rust_buffer2.update(cx, |buffer, cx| {
996 buffer.edit([(0..0, "let x = 1;")], None, cx)
997 });
998 assert_eq!(
999 fake_rust_server
1000 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1001 .await
1002 .text_document,
1003 lsp::VersionedTextDocumentIdentifier::new(
1004 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1005 1
1006 )
1007 );
1008
1009 // Save notifications are reported to all servers.
1010 project
1011 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1012 .await
1013 .unwrap();
1014 assert_eq!(
1015 fake_rust_server
1016 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1017 .await
1018 .text_document,
1019 lsp::TextDocumentIdentifier::new(
1020 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1021 )
1022 );
1023 assert_eq!(
1024 fake_json_server
1025 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1026 .await
1027 .text_document,
1028 lsp::TextDocumentIdentifier::new(
1029 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1030 )
1031 );
1032
1033 // Renames are reported only to servers matching the buffer's language.
1034 fs.rename(
1035 Path::new(path!("/dir/test2.rs")),
1036 Path::new(path!("/dir/test3.rs")),
1037 Default::default(),
1038 )
1039 .await
1040 .unwrap();
1041 assert_eq!(
1042 fake_rust_server
1043 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1044 .await
1045 .text_document,
1046 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1047 );
1048 assert_eq!(
1049 fake_rust_server
1050 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1051 .await
1052 .text_document,
1053 lsp::TextDocumentItem {
1054 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1055 version: 0,
1056 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1057 language_id: "rust".to_string(),
1058 },
1059 );
1060
1061 rust_buffer2.update(cx, |buffer, cx| {
1062 buffer.update_diagnostics(
1063 LanguageServerId(0),
1064 DiagnosticSet::from_sorted_entries(
1065 vec![DiagnosticEntry {
1066 diagnostic: Default::default(),
1067 range: Anchor::MIN..Anchor::MAX,
1068 }],
1069 &buffer.snapshot(),
1070 ),
1071 cx,
1072 );
1073 assert_eq!(
1074 buffer
1075 .snapshot()
1076 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1077 .count(),
1078 1
1079 );
1080 });
1081
1082 // When the rename changes the extension of the file, the buffer gets closed on the old
1083 // language server and gets opened on the new one.
1084 fs.rename(
1085 Path::new(path!("/dir/test3.rs")),
1086 Path::new(path!("/dir/test3.json")),
1087 Default::default(),
1088 )
1089 .await
1090 .unwrap();
1091 assert_eq!(
1092 fake_rust_server
1093 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1094 .await
1095 .text_document,
1096 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1097 );
1098 assert_eq!(
1099 fake_json_server
1100 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1101 .await
1102 .text_document,
1103 lsp::TextDocumentItem {
1104 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1105 version: 0,
1106 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1107 language_id: "json".to_string(),
1108 },
1109 );
1110
1111 // We clear the diagnostics, since the language has changed.
1112 rust_buffer2.update(cx, |buffer, _| {
1113 assert_eq!(
1114 buffer
1115 .snapshot()
1116 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1117 .count(),
1118 0
1119 );
1120 });
1121
1122 // The renamed file's version resets after changing language server.
1123 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1124 assert_eq!(
1125 fake_json_server
1126 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1127 .await
1128 .text_document,
1129 lsp::VersionedTextDocumentIdentifier::new(
1130 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1131 1
1132 )
1133 );
1134
1135 // Restart language servers
1136 project.update(cx, |project, cx| {
1137 project.restart_language_servers_for_buffers(
1138 vec![rust_buffer.clone(), json_buffer.clone()],
1139 HashSet::default(),
1140 cx,
1141 );
1142 });
1143
1144 let mut rust_shutdown_requests = fake_rust_server
1145 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1146 let mut json_shutdown_requests = fake_json_server
1147 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1148 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1149
1150 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1151 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1152
1153 // Ensure rust document is reopened in new rust language server
1154 assert_eq!(
1155 fake_rust_server
1156 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1157 .await
1158 .text_document,
1159 lsp::TextDocumentItem {
1160 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1161 version: 0,
1162 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1163 language_id: "rust".to_string(),
1164 }
1165 );
1166
1167 // Ensure json documents are reopened in new json language server
1168 assert_set_eq!(
1169 [
1170 fake_json_server
1171 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1172 .await
1173 .text_document,
1174 fake_json_server
1175 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1176 .await
1177 .text_document,
1178 ],
1179 [
1180 lsp::TextDocumentItem {
1181 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1182 version: 0,
1183 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1184 language_id: "json".to_string(),
1185 },
1186 lsp::TextDocumentItem {
1187 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1188 version: 0,
1189 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1190 language_id: "json".to_string(),
1191 }
1192 ]
1193 );
1194
1195 // Close notifications are reported only to servers matching the buffer's language.
1196 cx.update(|_| drop(_json_handle));
1197 let close_message = lsp::DidCloseTextDocumentParams {
1198 text_document: lsp::TextDocumentIdentifier::new(
1199 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1200 ),
1201 };
1202 assert_eq!(
1203 fake_json_server
1204 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1205 .await,
1206 close_message,
1207 );
1208}
1209
// Verifies that file-system events are forwarded to a language server only when
// they match the glob patterns the server registered via
// `workspace/didChangeWatchedFiles`, and that registering a watcher over an
// ignored directory (here `target/y`) causes that directory to be loaded into
// the worktree, while unwatched ignored siblings stay unloaded.
#[gpui::test]
async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".gitignore": "target\n",
            "Cargo.lock": "",
            "src": {
                "a.rs": "",
                "b.rs": "",
            },
            "target": {
                "x": {
                    "out": {
                        "x.rs": ""
                    }
                },
                "y": {
                    "out": {
                        "y.rs": "",
                    }
                },
                "z": {
                    "out": {
                        "z.rs": ""
                    }
                }
            }
        }),
    )
    .await;
    // A second tree outside the worktree, standing in for a package registry.
    fs.insert_tree(
        path!("/the-registry"),
        json!({
            "dep1": {
                "src": {
                    "dep1.rs": "",
                }
            },
            "dep2": {
                "src": {
                    "dep2.rs": "",
                }
            },
        }),
    )
    .await;
    // A third tree outside the worktree, standing in for the standard library.
    fs.insert_tree(
        path!("/the/stdlib"),
        json!({
            "LICENSE": "",
            "src": {
                "string.rs": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
        (project.languages().clone(), project.lsp_store())
    });
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            ..Default::default()
        },
    );

    cx.executor().run_until_parked();

    // Start the language server by opening a buffer with a compatible file extension.
    project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
        })
        .await
        .unwrap();

    // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
    project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
            ]
        );
    });

    let prev_read_dir_count = fs.read_dir_call_count();

    // The server's id is needed below to open an out-of-worktree buffer on its behalf.
    let fake_server = fake_servers.next().await.unwrap();
    let server_id = lsp_store.read_with(cx, |lsp_store, _| {
        let (id, _) = lsp_store.language_server_statuses().next().unwrap();
        id
    });

    // Simulate jumping to a definition in a dependency outside of the worktree.
    let _out_of_worktree_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_via_lsp(
                lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
                server_id,
                cx,
            )
        })
        .await
        .unwrap();

    // Keep track of the FS events reported to the language server.
    let file_changes = Arc::new(Mutex::new(Vec::new()));
    fake_server
        .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
            registrations: vec![lsp::Registration {
                id: Default::default(),
                method: "workspace/didChangeWatchedFiles".to_string(),
                register_options: serde_json::to_value(
                    lsp::DidChangeWatchedFilesRegistrationOptions {
                        watchers: vec![
                            // A single in-worktree file.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/Cargo.toml").to_string(),
                                ),
                            kind: None,
                            },
                            // In-worktree files filtered by extension.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/src/*.{rs,c}").to_string(),
                                ),
                                kind: None,
                            },
                            // A subtree of the *ignored* `target` directory.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the-root/target/y/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            // An absolute path entirely outside the worktree.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("/the/stdlib/src/**/*.rs").to_string(),
                                ),
                                kind: None,
                            },
                            // A relative pattern, matched against worktree roots.
                            lsp::FileSystemWatcher {
                                glob_pattern: lsp::GlobPattern::String(
                                    path!("**/Cargo.lock").to_string(),
                                ),
                                kind: None,
                            },
                        ],
                    },
                )
                .ok(),
            }],
        })
        .await
        .into_response()
        .unwrap();
    fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
        let file_changes = file_changes.clone();
        move |params, _| {
            let mut file_changes = file_changes.lock();
            file_changes.extend(params.changes);
            // Sort so assertions below don't depend on event arrival order.
            file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
        }
    });

    cx.executor().run_until_parked();
    // Registering the watchers alone must not synthesize any change events.
    assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
    // NOTE(review): presumably the extra read_dir calls come from recursively
    // loading the newly watched ignored/out-of-worktree directories — confirm
    // the derivation of the exact count if this assertion starts flaking.
    assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);

    // Ignore watches the editor itself installs (settings, global gitignore).
    let mut new_watched_paths = fs.watched_paths();
    new_watched_paths.retain(|path| {
        !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
    });
    assert_eq!(
        &new_watched_paths,
        &[
            Path::new(path!("/the-root")),
            Path::new(path!("/the-registry/dep1/src/dep1.rs")),
            Path::new(path!("/the/stdlib/src"))
        ]
    );

    // Now the language server has asked us to watch an ignored directory path,
    // so we recursively load it.
    project.update(cx, |project, cx| {
        let worktree = project.visible_worktrees(cx).next().unwrap();
        assert_eq!(
            worktree
                .read(cx)
                .snapshot()
                .entries(true, 0)
                .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
                .collect::<Vec<_>>(),
            &[
                ("", false),
                (".gitignore", false),
                ("Cargo.lock", false),
                ("src", false),
                ("src/a.rs", false),
                ("src/b.rs", false),
                ("target", true),
                ("target/x", true),
                ("target/y", true),
                ("target/y/out", true),
                ("target/y/out/y.rs", true),
                ("target/z", true),
            ]
        );
    });

    // Perform some file system mutations, two of which match the watched patterns,
    // and one of which does not.
    fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
        .await
        .unwrap();
    fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
        .await
        .unwrap();
    fs.create_file(
        path!("/the-root/target/x/out/x2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/the-root/target/y/out/y2.rs").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the-root/Cargo.lock").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    // NOTE(review): this writes to "/the-stdlib", not the "/the/stdlib" tree
    // created above. It matches no watcher either way, but confirm the path
    // (vs. "/the/stdlib/LICENSE") is intentional.
    fs.save(
        path!("/the-stdlib/LICENSE").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.save(
        path!("/the/stdlib/src/string.rs").as_ref(),
        &"".into(),
        Default::default(),
    )
    .await
    .unwrap();

    // The language server receives events for the FS mutations that match its watch patterns.
    cx.executor().run_until_parked();
    assert_eq!(
        &*file_changes.lock(),
        &[
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
                typ: lsp::FileChangeType::DELETED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
                typ: lsp::FileChangeType::CREATED,
            },
            lsp::FileEvent {
                uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
                typ: lsp::FileChangeType::CHANGED,
            },
        ]
    );
}
1512
1513#[gpui::test]
1514async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
1515 init_test(cx);
1516
1517 let fs = FakeFs::new(cx.executor());
1518 fs.insert_tree(
1519 path!("/dir"),
1520 json!({
1521 "a.rs": "let a = 1;",
1522 "b.rs": "let b = 2;"
1523 }),
1524 )
1525 .await;
1526
1527 let project = Project::test(
1528 fs,
1529 [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
1530 cx,
1531 )
1532 .await;
1533 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1534
1535 let buffer_a = project
1536 .update(cx, |project, cx| {
1537 project.open_local_buffer(path!("/dir/a.rs"), cx)
1538 })
1539 .await
1540 .unwrap();
1541 let buffer_b = project
1542 .update(cx, |project, cx| {
1543 project.open_local_buffer(path!("/dir/b.rs"), cx)
1544 })
1545 .await
1546 .unwrap();
1547
1548 lsp_store.update(cx, |lsp_store, cx| {
1549 lsp_store
1550 .update_diagnostics(
1551 LanguageServerId(0),
1552 lsp::PublishDiagnosticsParams {
1553 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
1554 version: None,
1555 diagnostics: vec![lsp::Diagnostic {
1556 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1557 severity: Some(lsp::DiagnosticSeverity::ERROR),
1558 message: "error 1".to_string(),
1559 ..Default::default()
1560 }],
1561 },
1562 None,
1563 DiagnosticSourceKind::Pushed,
1564 &[],
1565 cx,
1566 )
1567 .unwrap();
1568 lsp_store
1569 .update_diagnostics(
1570 LanguageServerId(0),
1571 lsp::PublishDiagnosticsParams {
1572 uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
1573 version: None,
1574 diagnostics: vec![lsp::Diagnostic {
1575 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1576 severity: Some(DiagnosticSeverity::WARNING),
1577 message: "error 2".to_string(),
1578 ..Default::default()
1579 }],
1580 },
1581 None,
1582 DiagnosticSourceKind::Pushed,
1583 &[],
1584 cx,
1585 )
1586 .unwrap();
1587 });
1588
1589 buffer_a.update(cx, |buffer, _| {
1590 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1591 assert_eq!(
1592 chunks
1593 .iter()
1594 .map(|(s, d)| (s.as_str(), *d))
1595 .collect::<Vec<_>>(),
1596 &[
1597 ("let ", None),
1598 ("a", Some(DiagnosticSeverity::ERROR)),
1599 (" = 1;", None),
1600 ]
1601 );
1602 });
1603 buffer_b.update(cx, |buffer, _| {
1604 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1605 assert_eq!(
1606 chunks
1607 .iter()
1608 .map(|(s, d)| (s.as_str(), *d))
1609 .collect::<Vec<_>>(),
1610 &[
1611 ("let ", None),
1612 ("b", Some(DiagnosticSeverity::WARNING)),
1613 (" = 2;", None),
1614 ]
1615 );
1616 });
1617}
1618
1619#[gpui::test]
1620async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1621 init_test(cx);
1622
1623 let fs = FakeFs::new(cx.executor());
1624 fs.insert_tree(
1625 path!("/root"),
1626 json!({
1627 "dir": {
1628 ".git": {
1629 "HEAD": "ref: refs/heads/main",
1630 },
1631 ".gitignore": "b.rs",
1632 "a.rs": "let a = 1;",
1633 "b.rs": "let b = 2;",
1634 },
1635 "other.rs": "let b = c;"
1636 }),
1637 )
1638 .await;
1639
1640 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1641 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1642 let (worktree, _) = project
1643 .update(cx, |project, cx| {
1644 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1645 })
1646 .await
1647 .unwrap();
1648 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1649
1650 let (worktree, _) = project
1651 .update(cx, |project, cx| {
1652 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1653 })
1654 .await
1655 .unwrap();
1656 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1657
1658 let server_id = LanguageServerId(0);
1659 lsp_store.update(cx, |lsp_store, cx| {
1660 lsp_store
1661 .update_diagnostics(
1662 server_id,
1663 lsp::PublishDiagnosticsParams {
1664 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1665 version: None,
1666 diagnostics: vec![lsp::Diagnostic {
1667 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1668 severity: Some(lsp::DiagnosticSeverity::ERROR),
1669 message: "unused variable 'b'".to_string(),
1670 ..Default::default()
1671 }],
1672 },
1673 None,
1674 DiagnosticSourceKind::Pushed,
1675 &[],
1676 cx,
1677 )
1678 .unwrap();
1679 lsp_store
1680 .update_diagnostics(
1681 server_id,
1682 lsp::PublishDiagnosticsParams {
1683 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1684 version: None,
1685 diagnostics: vec![lsp::Diagnostic {
1686 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1687 severity: Some(lsp::DiagnosticSeverity::ERROR),
1688 message: "unknown variable 'c'".to_string(),
1689 ..Default::default()
1690 }],
1691 },
1692 None,
1693 DiagnosticSourceKind::Pushed,
1694 &[],
1695 cx,
1696 )
1697 .unwrap();
1698 });
1699
1700 let main_ignored_buffer = project
1701 .update(cx, |project, cx| {
1702 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1703 })
1704 .await
1705 .unwrap();
1706 main_ignored_buffer.update(cx, |buffer, _| {
1707 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1708 assert_eq!(
1709 chunks
1710 .iter()
1711 .map(|(s, d)| (s.as_str(), *d))
1712 .collect::<Vec<_>>(),
1713 &[
1714 ("let ", None),
1715 ("b", Some(DiagnosticSeverity::ERROR)),
1716 (" = 2;", None),
1717 ],
1718 "Gigitnored buffers should still get in-buffer diagnostics",
1719 );
1720 });
1721 let other_buffer = project
1722 .update(cx, |project, cx| {
1723 project.open_buffer((other_worktree_id, rel_path("")), cx)
1724 })
1725 .await
1726 .unwrap();
1727 other_buffer.update(cx, |buffer, _| {
1728 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1729 assert_eq!(
1730 chunks
1731 .iter()
1732 .map(|(s, d)| (s.as_str(), *d))
1733 .collect::<Vec<_>>(),
1734 &[
1735 ("let b = ", None),
1736 ("c", Some(DiagnosticSeverity::ERROR)),
1737 (";", None),
1738 ],
1739 "Buffers from hidden projects should still get in-buffer diagnostics"
1740 );
1741 });
1742
1743 project.update(cx, |project, cx| {
1744 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1745 assert_eq!(
1746 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1747 vec![(
1748 ProjectPath {
1749 worktree_id: main_worktree_id,
1750 path: rel_path("b.rs").into(),
1751 },
1752 server_id,
1753 DiagnosticSummary {
1754 error_count: 1,
1755 warning_count: 0,
1756 }
1757 )]
1758 );
1759 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1760 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1761 });
1762}
1763
// Checks the project-event sequence driven by a server's disk-based
// diagnostics progress token: start/finish events bracket the diagnostic
// updates, and re-publishing identical (empty) diagnostics does not emit a
// redundant DiagnosticsUpdated event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            // Progress notifications with this token are treated as
            // disk-based-diagnostics activity.
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // All assertions below consume this stream in strict order.
    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the disk-based token emits the Started event.
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending progress under the token emits the Finished event.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is materialized in the newly opened buffer.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    // No-op publish: the event stream must stay pending (no duplicate event).
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1903
// Checks that restarting a language server while its disk-based diagnostics
// are still in progress does not wedge the "diagnostics running" state: the
// old server's unfinished progress is discarded, and only the new server's
// progress token is tracked.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    // All assertions below consume this stream in strict order.
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    let fake_server = fake_servers.next().await.unwrap();
    // The old server (id 0) is removed, the replacement comes up as id 1.
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is counted as running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2007
2008#[gpui::test]
2009async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
2010 init_test(cx);
2011
2012 let fs = FakeFs::new(cx.executor());
2013 fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;
2014
2015 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2016
2017 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2018 language_registry.add(rust_lang());
2019 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2020
2021 let (buffer, _) = project
2022 .update(cx, |project, cx| {
2023 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2024 })
2025 .await
2026 .unwrap();
2027
2028 // Publish diagnostics
2029 let fake_server = fake_servers.next().await.unwrap();
2030 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2031 uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2032 version: None,
2033 diagnostics: vec![lsp::Diagnostic {
2034 range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
2035 severity: Some(lsp::DiagnosticSeverity::ERROR),
2036 message: "the message".to_string(),
2037 ..Default::default()
2038 }],
2039 });
2040
2041 cx.executor().run_until_parked();
2042 buffer.update(cx, |buffer, _| {
2043 assert_eq!(
2044 buffer
2045 .snapshot()
2046 .diagnostics_in_range::<_, usize>(0..1, false)
2047 .map(|entry| entry.diagnostic.message.clone())
2048 .collect::<Vec<_>>(),
2049 ["the message".to_string()]
2050 );
2051 });
2052 project.update(cx, |project, cx| {
2053 assert_eq!(
2054 project.diagnostic_summary(false, cx),
2055 DiagnosticSummary {
2056 error_count: 1,
2057 warning_count: 0,
2058 }
2059 );
2060 });
2061
2062 project.update(cx, |project, cx| {
2063 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2064 });
2065
2066 // The diagnostics are cleared.
2067 cx.executor().run_until_parked();
2068 buffer.update(cx, |buffer, _| {
2069 assert_eq!(
2070 buffer
2071 .snapshot()
2072 .diagnostics_in_range::<_, usize>(0..1, false)
2073 .map(|entry| entry.diagnostic.message.clone())
2074 .collect::<Vec<_>>(),
2075 Vec::<String>::new(),
2076 );
2077 });
2078 project.update(cx, |project, cx| {
2079 assert_eq!(
2080 project.diagnostic_summary(false, cx),
2081 DiagnosticSummary {
2082 error_count: 0,
2083 warning_count: 0,
2084 }
2085 );
2086 });
2087}
2088
2089#[gpui::test]
2090async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2091 init_test(cx);
2092
2093 let fs = FakeFs::new(cx.executor());
2094 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2095
2096 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2097 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2098
2099 language_registry.add(rust_lang());
2100 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2101
2102 let (buffer, _handle) = project
2103 .update(cx, |project, cx| {
2104 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2105 })
2106 .await
2107 .unwrap();
2108
2109 // Before restarting the server, report diagnostics with an unknown buffer version.
2110 let fake_server = fake_servers.next().await.unwrap();
2111 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2112 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2113 version: Some(10000),
2114 diagnostics: Vec::new(),
2115 });
2116 cx.executor().run_until_parked();
2117 project.update(cx, |project, cx| {
2118 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2119 });
2120
2121 let mut fake_server = fake_servers.next().await.unwrap();
2122 let notification = fake_server
2123 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2124 .await
2125 .text_document;
2126 assert_eq!(notification.version, 0);
2127}
2128
2129#[gpui::test]
2130async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
2131 init_test(cx);
2132
2133 let progress_token = "the-progress-token";
2134
2135 let fs = FakeFs::new(cx.executor());
2136 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2137
2138 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2139
2140 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2141 language_registry.add(rust_lang());
2142 let mut fake_servers = language_registry.register_fake_lsp(
2143 "Rust",
2144 FakeLspAdapter {
2145 name: "the-language-server",
2146 disk_based_diagnostics_sources: vec!["disk".into()],
2147 disk_based_diagnostics_progress_token: Some(progress_token.into()),
2148 ..Default::default()
2149 },
2150 );
2151
2152 let (buffer, _handle) = project
2153 .update(cx, |project, cx| {
2154 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2155 })
2156 .await
2157 .unwrap();
2158
2159 // Simulate diagnostics starting to update.
2160 let mut fake_server = fake_servers.next().await.unwrap();
2161 fake_server
2162 .start_progress_with(
2163 "another-token",
2164 lsp::WorkDoneProgressBegin {
2165 cancellable: Some(false),
2166 ..Default::default()
2167 },
2168 )
2169 .await;
2170 fake_server
2171 .start_progress_with(
2172 progress_token,
2173 lsp::WorkDoneProgressBegin {
2174 cancellable: Some(true),
2175 ..Default::default()
2176 },
2177 )
2178 .await;
2179 cx.executor().run_until_parked();
2180
2181 project.update(cx, |project, cx| {
2182 project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
2183 });
2184
2185 let cancel_notification = fake_server
2186 .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
2187 .await;
2188 assert_eq!(
2189 cancel_notification.token,
2190 NumberOrString::String(progress_token.into())
2191 );
2192}
2193
// Checks that toggling the per-language `enable_language_server` setting
// stops and starts exactly the affected server: disabling Rust exits only the
// Rust server; re-enabling Rust while disabling JavaScript starts a fresh
// Rust server (which reopens the buffer) and exits the JS server.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening one buffer per language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The fresh Rust server reopens the still-open buffer.
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2311
/// Diagnostics arrive tagged with the buffer version the server observed;
/// they must be translated through any edits made since that version, and
/// re-translated as further edits land. Also checks overlapping diagnostics
/// and diagnostics arriving in out-of-order positions.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // "disk" marks these diagnostics as disk-based, which the `is_disk_based`
    // assertions below rely on.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        // Where the error and warning overlap, the error wins the highlight.
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2603
2604#[gpui::test]
2605async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
2606 init_test(cx);
2607
2608 let text = concat!(
2609 "let one = ;\n", //
2610 "let two = \n",
2611 "let three = 3;\n",
2612 );
2613
2614 let fs = FakeFs::new(cx.executor());
2615 fs.insert_tree("/dir", json!({ "a.rs": text })).await;
2616
2617 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2618 let buffer = project
2619 .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
2620 .await
2621 .unwrap();
2622
2623 project.update(cx, |project, cx| {
2624 project.lsp_store.update(cx, |lsp_store, cx| {
2625 lsp_store
2626 .update_diagnostic_entries(
2627 LanguageServerId(0),
2628 PathBuf::from("/dir/a.rs"),
2629 None,
2630 None,
2631 vec![
2632 DiagnosticEntry {
2633 range: Unclipped(PointUtf16::new(0, 10))
2634 ..Unclipped(PointUtf16::new(0, 10)),
2635 diagnostic: Diagnostic {
2636 severity: DiagnosticSeverity::ERROR,
2637 message: "syntax error 1".to_string(),
2638 source_kind: DiagnosticSourceKind::Pushed,
2639 ..Diagnostic::default()
2640 },
2641 },
2642 DiagnosticEntry {
2643 range: Unclipped(PointUtf16::new(1, 10))
2644 ..Unclipped(PointUtf16::new(1, 10)),
2645 diagnostic: Diagnostic {
2646 severity: DiagnosticSeverity::ERROR,
2647 message: "syntax error 2".to_string(),
2648 source_kind: DiagnosticSourceKind::Pushed,
2649 ..Diagnostic::default()
2650 },
2651 },
2652 ],
2653 cx,
2654 )
2655 .unwrap();
2656 })
2657 });
2658
2659 // An empty range is extended forward to include the following character.
2660 // At the end of a line, an empty range is extended backward to include
2661 // the preceding character.
2662 buffer.update(cx, |buffer, _| {
2663 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
2664 assert_eq!(
2665 chunks
2666 .iter()
2667 .map(|(s, d)| (s.as_str(), *d))
2668 .collect::<Vec<_>>(),
2669 &[
2670 ("let one = ", None),
2671 (";", Some(DiagnosticSeverity::ERROR)),
2672 ("\nlet two =", None),
2673 (" ", Some(DiagnosticSeverity::ERROR)),
2674 ("\nlet three = 3;\n", None)
2675 ]
2676 );
2677 });
2678}
2679
2680#[gpui::test]
2681async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
2682 init_test(cx);
2683
2684 let fs = FakeFs::new(cx.executor());
2685 fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
2686 .await;
2687
2688 let project = Project::test(fs, ["/dir".as_ref()], cx).await;
2689 let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());
2690
2691 lsp_store.update(cx, |lsp_store, cx| {
2692 lsp_store
2693 .update_diagnostic_entries(
2694 LanguageServerId(0),
2695 Path::new("/dir/a.rs").to_owned(),
2696 None,
2697 None,
2698 vec![DiagnosticEntry {
2699 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2700 diagnostic: Diagnostic {
2701 severity: DiagnosticSeverity::ERROR,
2702 is_primary: true,
2703 message: "syntax error a1".to_string(),
2704 source_kind: DiagnosticSourceKind::Pushed,
2705 ..Diagnostic::default()
2706 },
2707 }],
2708 cx,
2709 )
2710 .unwrap();
2711 lsp_store
2712 .update_diagnostic_entries(
2713 LanguageServerId(1),
2714 Path::new("/dir/a.rs").to_owned(),
2715 None,
2716 None,
2717 vec![DiagnosticEntry {
2718 range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
2719 diagnostic: Diagnostic {
2720 severity: DiagnosticSeverity::ERROR,
2721 is_primary: true,
2722 message: "syntax error b1".to_string(),
2723 source_kind: DiagnosticSourceKind::Pushed,
2724 ..Diagnostic::default()
2725 },
2726 }],
2727 cx,
2728 )
2729 .unwrap();
2730
2731 assert_eq!(
2732 lsp_store.diagnostic_summary(false, cx),
2733 DiagnosticSummary {
2734 error_count: 2,
2735 warning_count: 0,
2736 }
2737 );
2738 });
2739}
2740
/// `edits_from_lsp` with a past document version: the server's edits target
/// the snapshot it last saw (`lsp_document_version`), so they must be mapped
/// through the user edits made since then before being applied to the
/// current buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let mut fake_server = fake_servers.next().await.unwrap();
    // Version the server knows about; all LSP edits below refer to it.
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // The LSP edits land in the right places despite the interleaved user
    // edits, and the user's comments survive.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2895
/// A small change expressed as a very large diff (as rust-analyzer produces
/// for its merge-imports code action) must be minimized by `edits_from_lsp`
/// down to the genuinely changed ranges.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only the minimal changes survive: the `use` rewrite and the removal
        // of the now-redundant second import line.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3006
/// An insertion that follows a replacement at the same position violates the
/// LSP spec's ordering rules, but `edits_from_lsp` must still apply both
/// edits without losing either.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replacement covering the leading "Path".
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    // Insertion at the very start, listed after the replacement.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    // Both edits take effect: the import is prepended, the call survives.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3062
/// Edits sent out of order, with an inverted range and a range pointing past
/// the end of the buffer, must still normalize to the same minimal edits a
/// well-formed diff would produce.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: start (0,8) comes after end (0,4).
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far beyond the last line of the buffer.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Same minimal edits as the well-formed version of this diff.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3169
3170fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3171 buffer: &Buffer,
3172 range: Range<T>,
3173) -> Vec<(String, Option<DiagnosticSeverity>)> {
3174 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3175 for chunk in buffer.snapshot().chunks(range, true) {
3176 if chunks
3177 .last()
3178 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3179 {
3180 chunks.last_mut().unwrap().0.push_str(chunk.text);
3181 } else {
3182 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3183 }
3184 }
3185 chunks
3186}
3187
/// Go-to-definition resolving to a file outside the project's worktree: the
/// target is loaded into an extra, non-visible worktree, which is released
/// again once the definition (and its buffer handle) is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is part of the project; a.rs is outside the worktree.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        // Answer with a location in a.rs, which lies outside the worktree.
        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs appears as an additional, invisible worktree while the
        // definition is alive.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Lists each worktree's absolute path together with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3286
/// When a completion item carries a `text_edit`, its range and `new_text`
/// take precedence over both `insert_text` and `label`.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // The item carries all three sources of text; only text_edit should win.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    // The resolved completion uses the text_edit's text and range ("fqn").
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3369
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies the fallback chain for completion items that carry no
    // `text_edit` of their own, when the response supplies a default
    // `edit_range` via `CompletionListItemDefaults`:
    //   Test 1: `insert_text` is used as the new text.
    //   Test 2: with neither `text_edit` nor `insert_text`, the `label` is used.
    // In both cases the default edit range must become the replace range.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but insert_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        // Kick off the completion request first; the handler installed below
        // services it (`.next().await` waits for that request to arrive).
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the trailing "fqn" (last 3 chars).
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: Some("insertText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `insert_text` wins over `label`, and the default edit range is used.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "insertText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and insert_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        insert_text: None,
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // With no insert_text either, the label itself is the inserted text.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3505
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    // Verifies completion handling when the server supplies neither a
    // `text_edit` nor a default `edit_range`: the replace range must be
    // inferred from the word fragment preceding the cursor.
    //   Test 1: `insert_text` is used; range covers the "fqn" query word.
    //   Test 2: only a `label` is given; range covers "cmp" inside the string.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    // Inferred range is the 3-character word "fqn" before the cursor.
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Cursor sits just before the closing quote.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "component");
    // Inferred range covers "cmp" inside the string literal (cursor is one
    // character before the end of the buffer).
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3611
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    // Verifies that carriage returns in a completion's `insert_text` are
    // normalized to the buffer's line ending: both a bare "\r" and a "\r\n"
    // should come through as "\n".
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    // Issue the request first; the handler installed below answers it.
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Deliberately mixes "\r" and "\r\n" line endings.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both CR variants must be normalized to plain "\n".
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3679
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    // End-to-end test of the command-based code-action flow:
    //   1. the server returns actions carrying `data` but no edits,
    //   2. resolving the action populates a `command` (still no edits),
    //   3. executing the command causes the server to push edits via an
    //      `workspace/applyEdit` request,
    //   4. those edits end up in the resulting `ProjectTransaction`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    lsp::CodeActionOptions {
                        // Required so the client issues codeAction/resolve.
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one with `data`); `true` requests that the
    // resulting command also be executed.
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated request back to the client: insert "X"
                    // at the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The transaction is undoable as a unit.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3821
3822#[gpui::test]
3823async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
3824 init_test(cx);
3825 let fs = FakeFs::new(cx.background_executor.clone());
3826 let expected_contents = "content";
3827 fs.as_fake()
3828 .insert_tree(
3829 "/root",
3830 json!({
3831 "test.txt": expected_contents
3832 }),
3833 )
3834 .await;
3835
3836 let project = Project::test(fs, [path!("/root").as_ref()], cx).await;
3837
3838 let (worktree, entry_id) = project.read_with(cx, |project, cx| {
3839 let worktree = project.worktrees(cx).next().unwrap();
3840 let entry_id = worktree
3841 .read(cx)
3842 .entry_for_path(rel_path("test.txt"))
3843 .unwrap()
3844 .id;
3845 (worktree, entry_id)
3846 });
3847 let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
3848 let _result = project
3849 .update(cx, |project, cx| {
3850 project.rename_entry(
3851 entry_id,
3852 (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
3853 cx,
3854 )
3855 })
3856 .await
3857 .unwrap();
3858 worktree.read_with(cx, |worktree, _| {
3859 assert!(
3860 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3861 "Old file should have been removed"
3862 );
3863 assert!(
3864 worktree
3865 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3866 .is_some(),
3867 "Whole directory hierarchy and the new file should have been created"
3868 );
3869 });
3870 assert_eq!(
3871 worktree
3872 .update(cx, |worktree, cx| {
3873 worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
3874 })
3875 .await
3876 .unwrap()
3877 .text,
3878 expected_contents,
3879 "Moved file's contents should be preserved"
3880 );
3881
3882 let entry_id = worktree.read_with(cx, |worktree, _| {
3883 worktree
3884 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3885 .unwrap()
3886 .id
3887 });
3888
3889 let _result = project
3890 .update(cx, |project, cx| {
3891 project.rename_entry(
3892 entry_id,
3893 (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
3894 cx,
3895 )
3896 })
3897 .await
3898 .unwrap();
3899 worktree.read_with(cx, |worktree, _| {
3900 assert!(
3901 worktree.entry_for_path(rel_path("test.txt")).is_none(),
3902 "First file should not reappear"
3903 );
3904 assert!(
3905 worktree
3906 .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
3907 .is_none(),
3908 "Old file should have been removed"
3909 );
3910 assert!(
3911 worktree
3912 .entry_for_path(rel_path("dir1/dir2/test.txt"))
3913 .is_some(),
3914 "No error should have occurred after moving into existing directory"
3915 );
3916 });
3917 assert_eq!(
3918 worktree
3919 .update(cx, |worktree, cx| {
3920 worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
3921 })
3922 .await
3923 .unwrap()
3924 .text,
3925 expected_contents,
3926 "Moved file's contents should be preserved"
3927 );
3928}
3929
3930#[gpui::test(iterations = 10)]
3931async fn test_save_file(cx: &mut gpui::TestAppContext) {
3932 init_test(cx);
3933
3934 let fs = FakeFs::new(cx.executor());
3935 fs.insert_tree(
3936 path!("/dir"),
3937 json!({
3938 "file1": "the old contents",
3939 }),
3940 )
3941 .await;
3942
3943 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3944 let buffer = project
3945 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3946 .await
3947 .unwrap();
3948 buffer.update(cx, |buffer, cx| {
3949 assert_eq!(buffer.text(), "the old contents");
3950 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3951 });
3952
3953 project
3954 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3955 .await
3956 .unwrap();
3957
3958 let new_text = fs
3959 .load(Path::new(path!("/dir/file1")))
3960 .await
3961 .unwrap()
3962 .replace("\r\n", "\n");
3963 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3964}
3965
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Regression test: a brand-new (untitled) buffer has no language, so no
    // server runs for it. Saving it with a `.rs` name must trigger language
    // detection and start the Rust language server, which then gets a
    // `didOpen` for the newly-saved file.
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    // Registering an untitled buffer starts no server: there's no language yet.
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Saving under a `.rs` name assigns the Rust language to the buffer.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now served by the newly-started language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4045
4046#[gpui::test(iterations = 30)]
4047async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4048 init_test(cx);
4049
4050 let fs = FakeFs::new(cx.executor());
4051 fs.insert_tree(
4052 path!("/dir"),
4053 json!({
4054 "file1": "the original contents",
4055 }),
4056 )
4057 .await;
4058
4059 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4060 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4061 let buffer = project
4062 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4063 .await
4064 .unwrap();
4065
4066 // Simulate buffer diffs being slow, so that they don't complete before
4067 // the next file change occurs.
4068 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4069
4070 // Change the buffer's file on disk, and then wait for the file change
4071 // to be detected by the worktree, so that the buffer starts reloading.
4072 fs.save(
4073 path!("/dir/file1").as_ref(),
4074 &"the first contents".into(),
4075 Default::default(),
4076 )
4077 .await
4078 .unwrap();
4079 worktree.next_event(cx).await;
4080
4081 // Change the buffer's file again. Depending on the random seed, the
4082 // previous file change may still be in progress.
4083 fs.save(
4084 path!("/dir/file1").as_ref(),
4085 &"the second contents".into(),
4086 Default::default(),
4087 )
4088 .await
4089 .unwrap();
4090 worktree.next_event(cx).await;
4091
4092 cx.executor().run_until_parked();
4093 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4094 buffer.read_with(cx, |buffer, _| {
4095 assert_eq!(buffer.text(), on_disk_text);
4096 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4097 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4098 });
4099}
4100
4101#[gpui::test(iterations = 30)]
4102async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4103 init_test(cx);
4104
4105 let fs = FakeFs::new(cx.executor());
4106 fs.insert_tree(
4107 path!("/dir"),
4108 json!({
4109 "file1": "the original contents",
4110 }),
4111 )
4112 .await;
4113
4114 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4115 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4116 let buffer = project
4117 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4118 .await
4119 .unwrap();
4120
4121 // Simulate buffer diffs being slow, so that they don't complete before
4122 // the next file change occurs.
4123 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4124
4125 // Change the buffer's file on disk, and then wait for the file change
4126 // to be detected by the worktree, so that the buffer starts reloading.
4127 fs.save(
4128 path!("/dir/file1").as_ref(),
4129 &"the first contents".into(),
4130 Default::default(),
4131 )
4132 .await
4133 .unwrap();
4134 worktree.next_event(cx).await;
4135
4136 cx.executor()
4137 .spawn(cx.executor().simulate_random_delay())
4138 .await;
4139
4140 // Perform a noop edit, causing the buffer's version to increase.
4141 buffer.update(cx, |buffer, cx| {
4142 buffer.edit([(0..0, " ")], None, cx);
4143 buffer.undo(cx);
4144 });
4145
4146 cx.executor().run_until_parked();
4147 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4148 buffer.read_with(cx, |buffer, _| {
4149 let buffer_text = buffer.text();
4150 if buffer_text == on_disk_text {
4151 assert!(
4152 !buffer.is_dirty() && !buffer.has_conflict(),
4153 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4154 );
4155 }
4156 // If the file change occurred while the buffer was processing the first
4157 // change, the buffer will be in a conflicting state.
4158 else {
4159 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4160 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4161 }
4162 });
4163}
4164
4165#[gpui::test]
4166async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4167 init_test(cx);
4168
4169 let fs = FakeFs::new(cx.executor());
4170 fs.insert_tree(
4171 path!("/dir"),
4172 json!({
4173 "file1": "the old contents",
4174 }),
4175 )
4176 .await;
4177
4178 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4179 let buffer = project
4180 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4181 .await
4182 .unwrap();
4183 buffer.update(cx, |buffer, cx| {
4184 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4185 });
4186
4187 project
4188 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4189 .await
4190 .unwrap();
4191
4192 let new_text = fs
4193 .load(Path::new(path!("/dir/file1")))
4194 .await
4195 .unwrap()
4196 .replace("\r\n", "\n");
4197 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4198}
4199
4200#[gpui::test]
4201async fn test_save_as(cx: &mut gpui::TestAppContext) {
4202 init_test(cx);
4203
4204 let fs = FakeFs::new(cx.executor());
4205 fs.insert_tree("/dir", json!({})).await;
4206
4207 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4208
4209 let languages = project.update(cx, |project, _| project.languages().clone());
4210 languages.add(rust_lang());
4211
4212 let buffer = project.update(cx, |project, cx| {
4213 project.create_local_buffer("", None, false, cx)
4214 });
4215 buffer.update(cx, |buffer, cx| {
4216 buffer.edit([(0..0, "abc")], None, cx);
4217 assert!(buffer.is_dirty());
4218 assert!(!buffer.has_conflict());
4219 assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
4220 });
4221 project
4222 .update(cx, |project, cx| {
4223 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4224 let path = ProjectPath {
4225 worktree_id,
4226 path: rel_path("file1.rs").into(),
4227 };
4228 project.save_buffer_as(buffer.clone(), path, cx)
4229 })
4230 .await
4231 .unwrap();
4232 assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");
4233
4234 cx.executor().run_until_parked();
4235 buffer.update(cx, |buffer, cx| {
4236 assert_eq!(
4237 buffer.file().unwrap().full_path(cx),
4238 Path::new("dir/file1.rs")
4239 );
4240 assert!(!buffer.is_dirty());
4241 assert!(!buffer.has_conflict());
4242 assert_eq!(buffer.language().unwrap().name(), "Rust".into());
4243 });
4244
4245 let opened_buffer = project
4246 .update(cx, |project, cx| {
4247 project.open_local_buffer("/dir/file1.rs", cx)
4248 })
4249 .await
4250 .unwrap();
4251 assert_eq!(opened_buffer, buffer);
4252}
4253
4254#[gpui::test]
4255async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4256 init_test(cx);
4257
4258 let fs = FakeFs::new(cx.executor());
4259 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4260
4261 fs.insert_tree(
4262 path!("/dir"),
4263 json!({
4264 "data_a.txt": "data about a"
4265 }),
4266 )
4267 .await;
4268
4269 let buffer = project
4270 .update(cx, |project, cx| {
4271 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4272 })
4273 .await
4274 .unwrap();
4275
4276 buffer.update(cx, |buffer, cx| {
4277 buffer.edit([(11..12, "b")], None, cx);
4278 });
4279
4280 // Save buffer's contents as a new file and confirm that the buffer's now
4281 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4282 // file associated with the buffer has now been updated to `data_b.txt`
4283 project
4284 .update(cx, |project, cx| {
4285 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4286 let new_path = ProjectPath {
4287 worktree_id,
4288 path: rel_path("data_b.txt").into(),
4289 };
4290
4291 project.save_buffer_as(buffer.clone(), new_path, cx)
4292 })
4293 .await
4294 .unwrap();
4295
4296 buffer.update(cx, |buffer, cx| {
4297 assert_eq!(
4298 buffer.file().unwrap().full_path(cx),
4299 Path::new("dir/data_b.txt")
4300 )
4301 });
4302
4303 // Open the original `data_a.txt` file, confirming that its contents are
4304 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4305 let original_buffer = project
4306 .update(cx, |project, cx| {
4307 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4308 })
4309 .await
4310 .unwrap();
4311
4312 original_buffer.update(cx, |buffer, cx| {
4313 assert_eq!(buffer.text(), "data about a");
4314 assert_eq!(
4315 buffer.file().unwrap().full_path(cx),
4316 Path::new("dir/data_a.txt")
4317 )
4318 });
4319}
4320
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    // Exercises real-filesystem rescanning: after renames and deletions on
    // disk, worktree entry ids must remain stable, open buffers must track
    // their files (or report deletion), and a remote replica fed the
    // resulting update stream must converge to the same set of paths.
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real FS operations below require parking the executor.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    // Resolve a worktree-relative path to its entry id, panicking if absent.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Capture every update the local worktree emits so they can be replayed
    // into the remote replica later.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including the parent-directory rename).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths; the deleted
        // file's buffer keeps its last known path.
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4488
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    // Renaming a parent directory must preserve identity: the directory and the
    // file inside it keep their worktree entry ids, and an open buffer for that
    // file stays associated with it (and stays clean) across the rename.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Looks up the worktree entry id for a path, panicking if the entry is
    // missing — used to assert id stability before and after the rename.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    // Capture the ids and open a buffer before renaming.
    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" -> "b" through the project API and wait for the
    // rename (and any follow-up scanning) to settle.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // Entry ids survived the rename, and the buffer is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4542
4543#[gpui::test]
4544async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4545 init_test(cx);
4546
4547 let fs = FakeFs::new(cx.executor());
4548 fs.insert_tree(
4549 "/dir",
4550 json!({
4551 "a.txt": "a-contents",
4552 "b.txt": "b-contents",
4553 }),
4554 )
4555 .await;
4556
4557 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4558
4559 // Spawn multiple tasks to open paths, repeating some paths.
4560 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4561 (
4562 p.open_local_buffer("/dir/a.txt", cx),
4563 p.open_local_buffer("/dir/b.txt", cx),
4564 p.open_local_buffer("/dir/a.txt", cx),
4565 )
4566 });
4567
4568 let buffer_a_1 = buffer_a_1.await.unwrap();
4569 let buffer_a_2 = buffer_a_2.await.unwrap();
4570 let buffer_b = buffer_b.await.unwrap();
4571 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4572 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4573
4574 // There is only one buffer per path.
4575 let buffer_a_id = buffer_a_1.entity_id();
4576 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4577
4578 // Open the same path again while it is still open.
4579 drop(buffer_a_1);
4580 let buffer_a_3 = project
4581 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4582 .await
4583 .unwrap();
4584
4585 // There's still only one buffer per path.
4586 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4587}
4588
4589#[gpui::test]
4590async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
4591 init_test(cx);
4592
4593 let fs = FakeFs::new(cx.executor());
4594 fs.insert_tree(
4595 path!("/dir"),
4596 json!({
4597 "file1": "abc",
4598 "file2": "def",
4599 "file3": "ghi",
4600 }),
4601 )
4602 .await;
4603
4604 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4605
4606 let buffer1 = project
4607 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4608 .await
4609 .unwrap();
4610 let events = Arc::new(Mutex::new(Vec::new()));
4611
4612 // initially, the buffer isn't dirty.
4613 buffer1.update(cx, |buffer, cx| {
4614 cx.subscribe(&buffer1, {
4615 let events = events.clone();
4616 move |_, _, event, _| match event {
4617 BufferEvent::Operation { .. } => {}
4618 _ => events.lock().push(event.clone()),
4619 }
4620 })
4621 .detach();
4622
4623 assert!(!buffer.is_dirty());
4624 assert!(events.lock().is_empty());
4625
4626 buffer.edit([(1..2, "")], None, cx);
4627 });
4628
4629 // after the first edit, the buffer is dirty, and emits a dirtied event.
4630 buffer1.update(cx, |buffer, cx| {
4631 assert!(buffer.text() == "ac");
4632 assert!(buffer.is_dirty());
4633 assert_eq!(
4634 *events.lock(),
4635 &[
4636 language::BufferEvent::Edited,
4637 language::BufferEvent::DirtyChanged
4638 ]
4639 );
4640 events.lock().clear();
4641 buffer.did_save(
4642 buffer.version(),
4643 buffer.file().unwrap().disk_state().mtime(),
4644 cx,
4645 );
4646 });
4647
4648 // after saving, the buffer is not dirty, and emits a saved event.
4649 buffer1.update(cx, |buffer, cx| {
4650 assert!(!buffer.is_dirty());
4651 assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
4652 events.lock().clear();
4653
4654 buffer.edit([(1..1, "B")], None, cx);
4655 buffer.edit([(2..2, "D")], None, cx);
4656 });
4657
4658 // after editing again, the buffer is dirty, and emits another dirty event.
4659 buffer1.update(cx, |buffer, cx| {
4660 assert!(buffer.text() == "aBDc");
4661 assert!(buffer.is_dirty());
4662 assert_eq!(
4663 *events.lock(),
4664 &[
4665 language::BufferEvent::Edited,
4666 language::BufferEvent::DirtyChanged,
4667 language::BufferEvent::Edited,
4668 ],
4669 );
4670 events.lock().clear();
4671
4672 // After restoring the buffer to its previously-saved state,
4673 // the buffer is not considered dirty anymore.
4674 buffer.edit([(1..3, "")], None, cx);
4675 assert!(buffer.text() == "ac");
4676 assert!(!buffer.is_dirty());
4677 });
4678
4679 assert_eq!(
4680 *events.lock(),
4681 &[
4682 language::BufferEvent::Edited,
4683 language::BufferEvent::DirtyChanged
4684 ]
4685 );
4686
4687 // When a file is deleted, it is not considered dirty.
4688 let events = Arc::new(Mutex::new(Vec::new()));
4689 let buffer2 = project
4690 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4691 .await
4692 .unwrap();
4693 buffer2.update(cx, |_, cx| {
4694 cx.subscribe(&buffer2, {
4695 let events = events.clone();
4696 move |_, _, event, _| match event {
4697 BufferEvent::Operation { .. } => {}
4698 _ => events.lock().push(event.clone()),
4699 }
4700 })
4701 .detach();
4702 });
4703
4704 fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
4705 .await
4706 .unwrap();
4707 cx.executor().run_until_parked();
4708 buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
4709 assert_eq!(
4710 mem::take(&mut *events.lock()),
4711 &[language::BufferEvent::FileHandleChanged]
4712 );
4713
4714 // Buffer becomes dirty when edited.
4715 buffer2.update(cx, |buffer, cx| {
4716 buffer.edit([(2..3, "")], None, cx);
4717 assert_eq!(buffer.is_dirty(), true);
4718 });
4719 assert_eq!(
4720 mem::take(&mut *events.lock()),
4721 &[
4722 language::BufferEvent::Edited,
4723 language::BufferEvent::DirtyChanged
4724 ]
4725 );
4726
4727 // Buffer becomes clean again when all of its content is removed, because
4728 // the file was deleted.
4729 buffer2.update(cx, |buffer, cx| {
4730 buffer.edit([(0..2, "")], None, cx);
4731 assert_eq!(buffer.is_empty(), true);
4732 assert_eq!(buffer.is_dirty(), false);
4733 });
4734 assert_eq!(
4735 *events.lock(),
4736 &[
4737 language::BufferEvent::Edited,
4738 language::BufferEvent::DirtyChanged
4739 ]
4740 );
4741
4742 // When a file is already dirty when deleted, we don't emit a Dirtied event.
4743 let events = Arc::new(Mutex::new(Vec::new()));
4744 let buffer3 = project
4745 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
4746 .await
4747 .unwrap();
4748 buffer3.update(cx, |_, cx| {
4749 cx.subscribe(&buffer3, {
4750 let events = events.clone();
4751 move |_, _, event, _| match event {
4752 BufferEvent::Operation { .. } => {}
4753 _ => events.lock().push(event.clone()),
4754 }
4755 })
4756 .detach();
4757 });
4758
4759 buffer3.update(cx, |buffer, cx| {
4760 buffer.edit([(0..0, "x")], None, cx);
4761 });
4762 events.lock().clear();
4763 fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
4764 .await
4765 .unwrap();
4766 cx.executor().run_until_parked();
4767 assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
4768 cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
4769}
4770
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    // Verifies how a buffer reacts to its backing file changing on disk:
    // a clean buffer reloads (with anchors remapped through the diff), while a
    // dirty buffer keeps its contents and is flagged as conflicted.
    init_test(cx);

    // `ˇ` markers record offsets whose anchors we track across the reload.
    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Create an anchor at each marked offset before the on-disk change.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Change the file on disk, adding two new lines of text, and removing
    // one line.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &new_contents.as_str().into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // The anchors created above should now resolve to the corresponding
        // marked offsets in the new content.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &"\n\n\nAAAA\naaa\nBB\nbbbbb\n".into(),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4853
#[gpui::test]
async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
    // Verifies line-ending handling: buffers detect Unix vs. Windows endings
    // on load (normalizing text to "\n" internally), pick up ending changes
    // made on disk, and write the detected ending style back out on save.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "a\nb\nc\n",
            "file2": "one\r\ntwo\r\nthree\r\n",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();

    // Buffer text is normalized to "\n"; the original ending style is
    // remembered on the buffer.
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "a\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Unix);
    });
    buffer2.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "one\ntwo\nthree\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Change a file's line endings on disk from unix to windows. The buffer's
    // state updates correctly.
    fs.save(
        path!("/dir/file1").as_ref(),
        &"aaa\nb\nc\n".into(),
        LineEnding::Windows,
    )
    .await
    .unwrap();
    cx.executor().run_until_parked();
    buffer1.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), "aaa\nb\nc\n");
        assert_eq!(buffer.line_ending(), LineEnding::Windows);
    });

    // Save a file with windows line endings. The file is written correctly.
    buffer2.update(cx, |buffer, cx| {
        buffer.set_text("one\ntwo\nthree\nfour\n", cx);
    });
    project
        .update(cx, |project, cx| project.save_buffer(buffer2, cx))
        .await
        .unwrap();
    assert_eq!(
        fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
        "one\r\ntwo\r\nthree\r\nfour\r\n",
    );
}
4915
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    // Verifies diagnostic grouping: pushed LSP diagnostics whose
    // `related_information` entries point at each other are folded into
    // groups, with one primary entry per group and the related hints attached
    // as non-primary members sharing the same `group_id`.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // Build a publishDiagnostics payload with two logical groups:
    // - "error 1" (warning) with one related hint at the same range;
    // - "error 2" (error) with two related hints at a different range.
    // Hints also carry related info pointing back at their primary, mirroring
    // how rust-analyzer emits grouped diagnostics.
    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    // Feed the payload through the LSP store as if server 0 pushed it.
    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position: group 1 is "error 1" + its hint,
    // group 0 is "error 2" + its two hints; exactly one primary per group.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Fetching group 0 yields "error 2" and both of its hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Fetching group 1 yields "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5175
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    // Verifies the file-operation protocol around renaming an entry: when a
    // language server registers `willRename`/`didRename` filters, renaming a
    // matching file sends `workspace/willRenameFiles` (whose returned edit is
    // applied) followed by a `workspace/didRenameFiles` notification.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Register interest in *.rs files and all folders for rename operations.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a Rust buffer starts the fake language server.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename one.rs -> three.rs; it won't finish until the
    // willRename request below is answered.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; the project is
    // expected to resolve/apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Set once the server handles willRenameFiles, so we can assert later
    // that the request was actually issued with the expected edit.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles with
    // the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5311
5312#[gpui::test]
5313async fn test_rename(cx: &mut gpui::TestAppContext) {
5314 // hi
5315 init_test(cx);
5316
5317 let fs = FakeFs::new(cx.executor());
5318 fs.insert_tree(
5319 path!("/dir"),
5320 json!({
5321 "one.rs": "const ONE: usize = 1;",
5322 "two.rs": "const TWO: usize = one::ONE + one::ONE;"
5323 }),
5324 )
5325 .await;
5326
5327 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5328
5329 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
5330 language_registry.add(rust_lang());
5331 let mut fake_servers = language_registry.register_fake_lsp(
5332 "Rust",
5333 FakeLspAdapter {
5334 capabilities: lsp::ServerCapabilities {
5335 rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
5336 prepare_provider: Some(true),
5337 work_done_progress_options: Default::default(),
5338 })),
5339 ..Default::default()
5340 },
5341 ..Default::default()
5342 },
5343 );
5344
5345 let (buffer, _handle) = project
5346 .update(cx, |project, cx| {
5347 project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
5348 })
5349 .await
5350 .unwrap();
5351
5352 let fake_server = fake_servers.next().await.unwrap();
5353
5354 let response = project.update(cx, |project, cx| {
5355 project.prepare_rename(buffer.clone(), 7, cx)
5356 });
5357 fake_server
5358 .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
5359 assert_eq!(
5360 params.text_document.uri.as_str(),
5361 uri!("file:///dir/one.rs")
5362 );
5363 assert_eq!(params.position, lsp::Position::new(0, 7));
5364 Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
5365 lsp::Position::new(0, 6),
5366 lsp::Position::new(0, 9),
5367 ))))
5368 })
5369 .next()
5370 .await
5371 .unwrap();
5372 let response = response.await.unwrap();
5373 let PrepareRenameResponse::Success(range) = response else {
5374 panic!("{:?}", response);
5375 };
5376 let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
5377 assert_eq!(range, 6..9);
5378
5379 let response = project.update(cx, |project, cx| {
5380 project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
5381 });
5382 fake_server
5383 .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
5384 assert_eq!(
5385 params.text_document_position.text_document.uri.as_str(),
5386 uri!("file:///dir/one.rs")
5387 );
5388 assert_eq!(
5389 params.text_document_position.position,
5390 lsp::Position::new(0, 7)
5391 );
5392 assert_eq!(params.new_name, "THREE");
5393 Ok(Some(lsp::WorkspaceEdit {
5394 changes: Some(
5395 [
5396 (
5397 lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
5398 vec![lsp::TextEdit::new(
5399 lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
5400 "THREE".to_string(),
5401 )],
5402 ),
5403 (
5404 lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
5405 vec![
5406 lsp::TextEdit::new(
5407 lsp::Range::new(
5408 lsp::Position::new(0, 24),
5409 lsp::Position::new(0, 27),
5410 ),
5411 "THREE".to_string(),
5412 ),
5413 lsp::TextEdit::new(
5414 lsp::Range::new(
5415 lsp::Position::new(0, 35),
5416 lsp::Position::new(0, 38),
5417 ),
5418 "THREE".to_string(),
5419 ),
5420 ],
5421 ),
5422 ]
5423 .into_iter()
5424 .collect(),
5425 ),
5426 ..Default::default()
5427 }))
5428 })
5429 .next()
5430 .await
5431 .unwrap();
5432 let mut transaction = response.await.unwrap().0;
5433 assert_eq!(transaction.len(), 2);
5434 assert_eq!(
5435 transaction
5436 .remove_entry(&buffer)
5437 .unwrap()
5438 .0
5439 .update(cx, |buffer, _| buffer.text()),
5440 "const THREE: usize = 1;"
5441 );
5442 assert_eq!(
5443 transaction
5444 .into_keys()
5445 .next()
5446 .unwrap()
5447 .update(cx, |buffer, _| buffer.text()),
5448 "const TWO: usize = one::THREE + one::THREE;"
5449 );
5450}
5451
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    // Verifies project-wide text search: matches come from files on disk, and
    // unsaved in-memory buffer edits are searched in place of the on-disk
    // content (four.rs gains matches only after being edited, without saving).
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Whole-word, case-insensitive search for "TWO" across the worktree.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now contains "TWO".
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The same query now also reports the unsaved matches in four.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5528
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    // Verifies the inclusion `PathMatcher` of a search query: only files
    // matching at least one inclusion glob are searched, and non-matching
    // globs in the list are simply ignored rather than causing failures.
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    // An inclusion glob that matches nothing yields no results at all.
    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    // A single matching glob restricts results to those files.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    // A matching glob alongside a non-matching one still works.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    // Multiple matching globs union their results.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5652
5653#[gpui::test]
5654async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5655 init_test(cx);
5656
5657 let search_query = "file";
5658
5659 let fs = FakeFs::new(cx.executor());
5660 fs.insert_tree(
5661 path!("/dir"),
5662 json!({
5663 "one.rs": r#"// Rust file one"#,
5664 "one.ts": r#"// TypeScript file one"#,
5665 "two.rs": r#"// Rust file two"#,
5666 "two.ts": r#"// TypeScript file two"#,
5667 }),
5668 )
5669 .await;
5670 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5671
5672 assert_eq!(
5673 search(
5674 &project,
5675 SearchQuery::text(
5676 search_query,
5677 false,
5678 true,
5679 false,
5680 Default::default(),
5681 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5682 false,
5683 None,
5684 )
5685 .unwrap(),
5686 cx
5687 )
5688 .await
5689 .unwrap(),
5690 HashMap::from_iter([
5691 (path!("dir/one.rs").to_string(), vec![8..12]),
5692 (path!("dir/one.ts").to_string(), vec![14..18]),
5693 (path!("dir/two.rs").to_string(), vec![8..12]),
5694 (path!("dir/two.ts").to_string(), vec![14..18]),
5695 ]),
5696 "If no exclusions match, all files should be returned"
5697 );
5698
5699 assert_eq!(
5700 search(
5701 &project,
5702 SearchQuery::text(
5703 search_query,
5704 false,
5705 true,
5706 false,
5707 Default::default(),
5708 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5709 false,
5710 None,
5711 )
5712 .unwrap(),
5713 cx
5714 )
5715 .await
5716 .unwrap(),
5717 HashMap::from_iter([
5718 (path!("dir/one.ts").to_string(), vec![14..18]),
5719 (path!("dir/two.ts").to_string(), vec![14..18]),
5720 ]),
5721 "Rust exclusion search should give only TypeScript files"
5722 );
5723
5724 assert_eq!(
5725 search(
5726 &project,
5727 SearchQuery::text(
5728 search_query,
5729 false,
5730 true,
5731 false,
5732 Default::default(),
5733 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5734 .unwrap(),
5735 false,
5736 None,
5737 )
5738 .unwrap(),
5739 cx
5740 )
5741 .await
5742 .unwrap(),
5743 HashMap::from_iter([
5744 (path!("dir/one.rs").to_string(), vec![8..12]),
5745 (path!("dir/two.rs").to_string(), vec![8..12]),
5746 ]),
5747 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5748 );
5749
5750 assert!(
5751 search(
5752 &project,
5753 SearchQuery::text(
5754 search_query,
5755 false,
5756 true,
5757 false,
5758 Default::default(),
5759 PathMatcher::new(
5760 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5761 PathStyle::local(),
5762 )
5763 .unwrap(),
5764 false,
5765 None,
5766 )
5767 .unwrap(),
5768 cx
5769 )
5770 .await
5771 .unwrap()
5772 .is_empty(),
5773 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5774 );
5775}
5776
5777#[gpui::test]
5778async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5779 init_test(cx);
5780
5781 let search_query = "file";
5782
5783 let fs = FakeFs::new(cx.executor());
5784 fs.insert_tree(
5785 path!("/dir"),
5786 json!({
5787 "one.rs": r#"// Rust file one"#,
5788 "one.ts": r#"// TypeScript file one"#,
5789 "two.rs": r#"// Rust file two"#,
5790 "two.ts": r#"// TypeScript file two"#,
5791 }),
5792 )
5793 .await;
5794
5795 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5796 let path_style = PathStyle::local();
5797 let _buffer = project.update(cx, |project, cx| {
5798 project.create_local_buffer("file", None, false, cx)
5799 });
5800
5801 assert_eq!(
5802 search(
5803 &project,
5804 SearchQuery::text(
5805 search_query,
5806 false,
5807 true,
5808 false,
5809 Default::default(),
5810 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5811 false,
5812 None,
5813 )
5814 .unwrap(),
5815 cx
5816 )
5817 .await
5818 .unwrap(),
5819 HashMap::from_iter([
5820 (path!("dir/one.rs").to_string(), vec![8..12]),
5821 (path!("dir/one.ts").to_string(), vec![14..18]),
5822 (path!("dir/two.rs").to_string(), vec![8..12]),
5823 (path!("dir/two.ts").to_string(), vec![14..18]),
5824 ]),
5825 "If no exclusions match, all files should be returned"
5826 );
5827
5828 assert_eq!(
5829 search(
5830 &project,
5831 SearchQuery::text(
5832 search_query,
5833 false,
5834 true,
5835 false,
5836 Default::default(),
5837 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5838 false,
5839 None,
5840 )
5841 .unwrap(),
5842 cx
5843 )
5844 .await
5845 .unwrap(),
5846 HashMap::from_iter([
5847 (path!("dir/one.ts").to_string(), vec![14..18]),
5848 (path!("dir/two.ts").to_string(), vec![14..18]),
5849 ]),
5850 "Rust exclusion search should give only TypeScript files"
5851 );
5852
5853 assert_eq!(
5854 search(
5855 &project,
5856 SearchQuery::text(
5857 search_query,
5858 false,
5859 true,
5860 false,
5861 Default::default(),
5862 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5863 false,
5864 None,
5865 )
5866 .unwrap(),
5867 cx
5868 )
5869 .await
5870 .unwrap(),
5871 HashMap::from_iter([
5872 (path!("dir/one.rs").to_string(), vec![8..12]),
5873 (path!("dir/two.rs").to_string(), vec![8..12]),
5874 ]),
5875 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5876 );
5877
5878 assert!(
5879 search(
5880 &project,
5881 SearchQuery::text(
5882 search_query,
5883 false,
5884 true,
5885 false,
5886 Default::default(),
5887 PathMatcher::new(
5888 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5889 PathStyle::local(),
5890 )
5891 .unwrap(),
5892 false,
5893 None,
5894 )
5895 .unwrap(),
5896 cx
5897 )
5898 .await
5899 .unwrap()
5900 .is_empty(),
5901 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5902 );
5903}
5904
5905#[gpui::test]
5906async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5907 init_test(cx);
5908
5909 let search_query = "file";
5910
5911 let fs = FakeFs::new(cx.executor());
5912 fs.insert_tree(
5913 path!("/dir"),
5914 json!({
5915 "one.rs": r#"// Rust file one"#,
5916 "one.ts": r#"// TypeScript file one"#,
5917 "two.rs": r#"// Rust file two"#,
5918 "two.ts": r#"// TypeScript file two"#,
5919 }),
5920 )
5921 .await;
5922 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5923 assert!(
5924 search(
5925 &project,
5926 SearchQuery::text(
5927 search_query,
5928 false,
5929 true,
5930 false,
5931 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5932 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5933 false,
5934 None,
5935 )
5936 .unwrap(),
5937 cx
5938 )
5939 .await
5940 .unwrap()
5941 .is_empty(),
5942 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5943 );
5944
5945 assert!(
5946 search(
5947 &project,
5948 SearchQuery::text(
5949 search_query,
5950 false,
5951 true,
5952 false,
5953 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5954 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5955 false,
5956 None,
5957 )
5958 .unwrap(),
5959 cx
5960 )
5961 .await
5962 .unwrap()
5963 .is_empty(),
5964 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5965 );
5966
5967 assert!(
5968 search(
5969 &project,
5970 SearchQuery::text(
5971 search_query,
5972 false,
5973 true,
5974 false,
5975 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5976 .unwrap(),
5977 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5978 .unwrap(),
5979 false,
5980 None,
5981 )
5982 .unwrap(),
5983 cx
5984 )
5985 .await
5986 .unwrap()
5987 .is_empty(),
5988 "Non-matching inclusions and exclusions should not change that."
5989 );
5990
5991 assert_eq!(
5992 search(
5993 &project,
5994 SearchQuery::text(
5995 search_query,
5996 false,
5997 true,
5998 false,
5999 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6000 .unwrap(),
6001 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6002 .unwrap(),
6003 false,
6004 None,
6005 )
6006 .unwrap(),
6007 cx
6008 )
6009 .await
6010 .unwrap(),
6011 HashMap::from_iter([
6012 (path!("dir/one.ts").to_string(), vec![14..18]),
6013 (path!("dir/two.ts").to_string(), vec![14..18]),
6014 ]),
6015 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6016 );
6017}
6018
6019#[gpui::test]
6020async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6021 init_test(cx);
6022
6023 let fs = FakeFs::new(cx.executor());
6024 fs.insert_tree(
6025 path!("/worktree-a"),
6026 json!({
6027 "haystack.rs": r#"// NEEDLE"#,
6028 "haystack.ts": r#"// NEEDLE"#,
6029 }),
6030 )
6031 .await;
6032 fs.insert_tree(
6033 path!("/worktree-b"),
6034 json!({
6035 "haystack.rs": r#"// NEEDLE"#,
6036 "haystack.ts": r#"// NEEDLE"#,
6037 }),
6038 )
6039 .await;
6040
6041 let path_style = PathStyle::local();
6042 let project = Project::test(
6043 fs.clone(),
6044 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6045 cx,
6046 )
6047 .await;
6048
6049 assert_eq!(
6050 search(
6051 &project,
6052 SearchQuery::text(
6053 "NEEDLE",
6054 false,
6055 true,
6056 false,
6057 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6058 Default::default(),
6059 true,
6060 None,
6061 )
6062 .unwrap(),
6063 cx
6064 )
6065 .await
6066 .unwrap(),
6067 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6068 "should only return results from included worktree"
6069 );
6070 assert_eq!(
6071 search(
6072 &project,
6073 SearchQuery::text(
6074 "NEEDLE",
6075 false,
6076 true,
6077 false,
6078 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6079 Default::default(),
6080 true,
6081 None,
6082 )
6083 .unwrap(),
6084 cx
6085 )
6086 .await
6087 .unwrap(),
6088 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6089 "should only return results from included worktree"
6090 );
6091
6092 assert_eq!(
6093 search(
6094 &project,
6095 SearchQuery::text(
6096 "NEEDLE",
6097 false,
6098 true,
6099 false,
6100 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6101 Default::default(),
6102 false,
6103 None,
6104 )
6105 .unwrap(),
6106 cx
6107 )
6108 .await
6109 .unwrap(),
6110 HashMap::from_iter([
6111 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6112 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6113 ]),
6114 "should return results from both worktrees"
6115 );
6116}
6117
6118#[gpui::test]
6119async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
6120 init_test(cx);
6121
6122 let fs = FakeFs::new(cx.background_executor.clone());
6123 fs.insert_tree(
6124 path!("/dir"),
6125 json!({
6126 ".git": {},
6127 ".gitignore": "**/target\n/node_modules\n",
6128 "target": {
6129 "index.txt": "index_key:index_value"
6130 },
6131 "node_modules": {
6132 "eslint": {
6133 "index.ts": "const eslint_key = 'eslint value'",
6134 "package.json": r#"{ "some_key": "some value" }"#,
6135 },
6136 "prettier": {
6137 "index.ts": "const prettier_key = 'prettier value'",
6138 "package.json": r#"{ "other_key": "other value" }"#,
6139 },
6140 },
6141 "package.json": r#"{ "main_key": "main value" }"#,
6142 }),
6143 )
6144 .await;
6145 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6146
6147 let query = "key";
6148 assert_eq!(
6149 search(
6150 &project,
6151 SearchQuery::text(
6152 query,
6153 false,
6154 false,
6155 false,
6156 Default::default(),
6157 Default::default(),
6158 false,
6159 None,
6160 )
6161 .unwrap(),
6162 cx
6163 )
6164 .await
6165 .unwrap(),
6166 HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
6167 "Only one non-ignored file should have the query"
6168 );
6169
6170 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6171 let path_style = PathStyle::local();
6172 assert_eq!(
6173 search(
6174 &project,
6175 SearchQuery::text(
6176 query,
6177 false,
6178 false,
6179 true,
6180 Default::default(),
6181 Default::default(),
6182 false,
6183 None,
6184 )
6185 .unwrap(),
6186 cx
6187 )
6188 .await
6189 .unwrap(),
6190 HashMap::from_iter([
6191 (path!("dir/package.json").to_string(), vec![8..11]),
6192 (path!("dir/target/index.txt").to_string(), vec![6..9]),
6193 (
6194 path!("dir/node_modules/prettier/package.json").to_string(),
6195 vec![9..12]
6196 ),
6197 (
6198 path!("dir/node_modules/prettier/index.ts").to_string(),
6199 vec![15..18]
6200 ),
6201 (
6202 path!("dir/node_modules/eslint/index.ts").to_string(),
6203 vec![13..16]
6204 ),
6205 (
6206 path!("dir/node_modules/eslint/package.json").to_string(),
6207 vec![8..11]
6208 ),
6209 ]),
6210 "Unrestricted search with ignored directories should find every file with the query"
6211 );
6212
6213 let files_to_include =
6214 PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
6215 let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
6216 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
6217 assert_eq!(
6218 search(
6219 &project,
6220 SearchQuery::text(
6221 query,
6222 false,
6223 false,
6224 true,
6225 files_to_include,
6226 files_to_exclude,
6227 false,
6228 None,
6229 )
6230 .unwrap(),
6231 cx
6232 )
6233 .await
6234 .unwrap(),
6235 HashMap::from_iter([(
6236 path!("dir/node_modules/prettier/package.json").to_string(),
6237 vec![9..12]
6238 )]),
6239 "With search including ignored prettier directory and excluding TS files, only one file should be found"
6240 );
6241}
6242
/// Searching non-ASCII (Cyrillic) text: match ranges are byte offsets, and
/// case-insensitive matching of non-ASCII queries is backed by a regex query.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // Case-sensitive query for the lowercase word. The expected ranges below
    // are byte offsets: "привет" is 6 characters but 12 bytes in UTF-8, hence
    // spans like 3..15 and 17..29.
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive plain-text query stays a `Text` query.
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // Case-insensitive matching of non-ASCII text is implemented by falling
    // back to a regex query, so uppercase "ПРИВЕТ" is matched as well.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // Even though the case-insensitive query is regex-backed, the '.' in the
    // query text must match literally: only "ПРИВЕТ." in two.rs matches, not
    // the '?' or '!' that follow the word in one.rs.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6325
/// Creating an entry with an unusual-but-legal name ("b..") inside a worktree
/// rooted at a nested directory writes it to the filesystem unmodified.
#[gpui::test]
async fn test_create_entry(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        "/one/two",
        json!({
            "three": {
                "a.txt": "",
                "four": {}
            },
            "c.rs": ""
        }),
    )
    .await;

    // The worktree is rooted at the nested "three" directory, not at "/one".
    let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
    project
        .update(cx, |project, cx| {
            let id = project.worktrees(cx).next().unwrap().read(cx).id();
            // "b.." is a valid file name (not a parent-directory reference);
            // creation must succeed and must not normalize the name away.
            project.create_entry((id, rel_path("b..")), true, cx)
        })
        .await
        .unwrap()
        .into_included()
        .unwrap();

    // The new entry appears on disk inside the worktree root, alongside the
    // pre-existing entries; nothing outside the worktree is touched.
    assert_eq!(
        fs.paths(true),
        vec![
            PathBuf::from(path!("/")),
            PathBuf::from(path!("/one")),
            PathBuf::from(path!("/one/two")),
            PathBuf::from(path!("/one/two/c.rs")),
            PathBuf::from(path!("/one/two/three")),
            PathBuf::from(path!("/one/two/three/a.txt")),
            PathBuf::from(path!("/one/two/three/b..")),
            PathBuf::from(path!("/one/two/three/four")),
        ]
    );
}
6368
6369#[gpui::test]
6370async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
6371 init_test(cx);
6372
6373 let fs = FakeFs::new(cx.executor());
6374 fs.insert_tree(
6375 path!("/dir"),
6376 json!({
6377 "a.tsx": "a",
6378 }),
6379 )
6380 .await;
6381
6382 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6383
6384 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6385 language_registry.add(tsx_lang());
6386 let language_server_names = [
6387 "TypeScriptServer",
6388 "TailwindServer",
6389 "ESLintServer",
6390 "NoHoverCapabilitiesServer",
6391 ];
6392 let mut language_servers = [
6393 language_registry.register_fake_lsp(
6394 "tsx",
6395 FakeLspAdapter {
6396 name: language_server_names[0],
6397 capabilities: lsp::ServerCapabilities {
6398 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6399 ..lsp::ServerCapabilities::default()
6400 },
6401 ..FakeLspAdapter::default()
6402 },
6403 ),
6404 language_registry.register_fake_lsp(
6405 "tsx",
6406 FakeLspAdapter {
6407 name: language_server_names[1],
6408 capabilities: lsp::ServerCapabilities {
6409 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6410 ..lsp::ServerCapabilities::default()
6411 },
6412 ..FakeLspAdapter::default()
6413 },
6414 ),
6415 language_registry.register_fake_lsp(
6416 "tsx",
6417 FakeLspAdapter {
6418 name: language_server_names[2],
6419 capabilities: lsp::ServerCapabilities {
6420 hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
6421 ..lsp::ServerCapabilities::default()
6422 },
6423 ..FakeLspAdapter::default()
6424 },
6425 ),
6426 language_registry.register_fake_lsp(
6427 "tsx",
6428 FakeLspAdapter {
6429 name: language_server_names[3],
6430 capabilities: lsp::ServerCapabilities {
6431 hover_provider: None,
6432 ..lsp::ServerCapabilities::default()
6433 },
6434 ..FakeLspAdapter::default()
6435 },
6436 ),
6437 ];
6438
6439 let (buffer, _handle) = project
6440 .update(cx, |p, cx| {
6441 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6442 })
6443 .await
6444 .unwrap();
6445 cx.executor().run_until_parked();
6446
6447 let mut servers_with_hover_requests = HashMap::default();
6448 for i in 0..language_server_names.len() {
6449 let new_server = language_servers[i].next().await.unwrap_or_else(|| {
6450 panic!(
6451 "Failed to get language server #{i} with name {}",
6452 &language_server_names[i]
6453 )
6454 });
6455 let new_server_name = new_server.server.name();
6456 assert!(
6457 !servers_with_hover_requests.contains_key(&new_server_name),
6458 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6459 );
6460 match new_server_name.as_ref() {
6461 "TailwindServer" | "TypeScriptServer" => {
6462 servers_with_hover_requests.insert(
6463 new_server_name.clone(),
6464 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6465 move |_, _| {
6466 let name = new_server_name.clone();
6467 async move {
6468 Ok(Some(lsp::Hover {
6469 contents: lsp::HoverContents::Scalar(
6470 lsp::MarkedString::String(format!("{name} hover")),
6471 ),
6472 range: None,
6473 }))
6474 }
6475 },
6476 ),
6477 );
6478 }
6479 "ESLintServer" => {
6480 servers_with_hover_requests.insert(
6481 new_server_name,
6482 new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
6483 |_, _| async move { Ok(None) },
6484 ),
6485 );
6486 }
6487 "NoHoverCapabilitiesServer" => {
6488 let _never_handled = new_server
6489 .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
6490 panic!(
6491 "Should not call for hovers server with no corresponding capabilities"
6492 )
6493 });
6494 }
6495 unexpected => panic!("Unexpected server name: {unexpected}"),
6496 }
6497 }
6498
6499 let hover_task = project.update(cx, |project, cx| {
6500 project.hover(&buffer, Point::new(0, 0), cx)
6501 });
6502 let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
6503 |mut hover_request| async move {
6504 hover_request
6505 .next()
6506 .await
6507 .expect("All hover requests should have been triggered")
6508 },
6509 ))
6510 .await;
6511 assert_eq!(
6512 vec!["TailwindServer hover", "TypeScriptServer hover"],
6513 hover_task
6514 .await
6515 .into_iter()
6516 .flatten()
6517 .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
6518 .sorted()
6519 .collect::<Vec<_>>(),
6520 "Should receive hover responses from all related servers with hover capabilities"
6521 );
6522}
6523
/// Hover responses whose parts are all empty or whitespace-only must be
/// filtered out entirely rather than shown as blank hover entries.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // Respond with hover content whose every part is empty or whitespace-only.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    // Wait until the fake server has actually served the request before
    // inspecting the hover result.
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6597
/// Requesting code actions with an explicit kinds filter returns only the
/// actions of the requested kind, dropping the rest of the server's offers.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; the request below asks
    // for only one of those kinds.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the action matching the requested kind should survive filtering.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6676
6677#[gpui::test]
6678async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6679 init_test(cx);
6680
6681 let fs = FakeFs::new(cx.executor());
6682 fs.insert_tree(
6683 path!("/dir"),
6684 json!({
6685 "a.tsx": "a",
6686 }),
6687 )
6688 .await;
6689
6690 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6691
6692 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6693 language_registry.add(tsx_lang());
6694 let language_server_names = [
6695 "TypeScriptServer",
6696 "TailwindServer",
6697 "ESLintServer",
6698 "NoActionsCapabilitiesServer",
6699 ];
6700
6701 let mut language_server_rxs = [
6702 language_registry.register_fake_lsp(
6703 "tsx",
6704 FakeLspAdapter {
6705 name: language_server_names[0],
6706 capabilities: lsp::ServerCapabilities {
6707 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6708 ..lsp::ServerCapabilities::default()
6709 },
6710 ..FakeLspAdapter::default()
6711 },
6712 ),
6713 language_registry.register_fake_lsp(
6714 "tsx",
6715 FakeLspAdapter {
6716 name: language_server_names[1],
6717 capabilities: lsp::ServerCapabilities {
6718 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6719 ..lsp::ServerCapabilities::default()
6720 },
6721 ..FakeLspAdapter::default()
6722 },
6723 ),
6724 language_registry.register_fake_lsp(
6725 "tsx",
6726 FakeLspAdapter {
6727 name: language_server_names[2],
6728 capabilities: lsp::ServerCapabilities {
6729 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6730 ..lsp::ServerCapabilities::default()
6731 },
6732 ..FakeLspAdapter::default()
6733 },
6734 ),
6735 language_registry.register_fake_lsp(
6736 "tsx",
6737 FakeLspAdapter {
6738 name: language_server_names[3],
6739 capabilities: lsp::ServerCapabilities {
6740 code_action_provider: None,
6741 ..lsp::ServerCapabilities::default()
6742 },
6743 ..FakeLspAdapter::default()
6744 },
6745 ),
6746 ];
6747
6748 let (buffer, _handle) = project
6749 .update(cx, |p, cx| {
6750 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6751 })
6752 .await
6753 .unwrap();
6754 cx.executor().run_until_parked();
6755
6756 let mut servers_with_actions_requests = HashMap::default();
6757 for i in 0..language_server_names.len() {
6758 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6759 panic!(
6760 "Failed to get language server #{i} with name {}",
6761 &language_server_names[i]
6762 )
6763 });
6764 let new_server_name = new_server.server.name();
6765
6766 assert!(
6767 !servers_with_actions_requests.contains_key(&new_server_name),
6768 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6769 );
6770 match new_server_name.0.as_ref() {
6771 "TailwindServer" | "TypeScriptServer" => {
6772 servers_with_actions_requests.insert(
6773 new_server_name.clone(),
6774 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6775 move |_, _| {
6776 let name = new_server_name.clone();
6777 async move {
6778 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6779 lsp::CodeAction {
6780 title: format!("{name} code action"),
6781 ..lsp::CodeAction::default()
6782 },
6783 )]))
6784 }
6785 },
6786 ),
6787 );
6788 }
6789 "ESLintServer" => {
6790 servers_with_actions_requests.insert(
6791 new_server_name,
6792 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6793 |_, _| async move { Ok(None) },
6794 ),
6795 );
6796 }
6797 "NoActionsCapabilitiesServer" => {
6798 let _never_handled = new_server
6799 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6800 panic!(
6801 "Should not call for code actions server with no corresponding capabilities"
6802 )
6803 });
6804 }
6805 unexpected => panic!("Unexpected server name: {unexpected}"),
6806 }
6807 }
6808
6809 let code_actions_task = project.update(cx, |project, cx| {
6810 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6811 });
6812
6813 // cx.run_until_parked();
6814 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6815 |mut code_actions_request| async move {
6816 code_actions_request
6817 .next()
6818 .await
6819 .expect("All code actions requests should have been triggered")
6820 },
6821 ))
6822 .await;
6823 assert_eq!(
6824 vec!["TailwindServer code action", "TypeScriptServer code action"],
6825 code_actions_task
6826 .await
6827 .unwrap()
6828 .unwrap()
6829 .into_iter()
6830 .map(|code_action| code_action.lsp_action.title().to_owned())
6831 .sorted()
6832 .collect::<Vec<_>>(),
6833 "Should receive code actions responses from all related servers with hover capabilities"
6834 );
6835}
6836
6837#[gpui::test]
6838async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6839 init_test(cx);
6840
6841 let fs = FakeFs::new(cx.executor());
6842 fs.insert_tree(
6843 "/dir",
6844 json!({
6845 "a.rs": "let a = 1;",
6846 "b.rs": "let b = 2;",
6847 "c.rs": "let c = 2;",
6848 }),
6849 )
6850 .await;
6851
6852 let project = Project::test(
6853 fs,
6854 [
6855 "/dir/a.rs".as_ref(),
6856 "/dir/b.rs".as_ref(),
6857 "/dir/c.rs".as_ref(),
6858 ],
6859 cx,
6860 )
6861 .await;
6862
6863 // check the initial state and get the worktrees
6864 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6865 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6866 assert_eq!(worktrees.len(), 3);
6867
6868 let worktree_a = worktrees[0].read(cx);
6869 let worktree_b = worktrees[1].read(cx);
6870 let worktree_c = worktrees[2].read(cx);
6871
6872 // check they start in the right order
6873 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6874 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6875 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6876
6877 (
6878 worktrees[0].clone(),
6879 worktrees[1].clone(),
6880 worktrees[2].clone(),
6881 )
6882 });
6883
6884 // move first worktree to after the second
6885 // [a, b, c] -> [b, a, c]
6886 project
6887 .update(cx, |project, cx| {
6888 let first = worktree_a.read(cx);
6889 let second = worktree_b.read(cx);
6890 project.move_worktree(first.id(), second.id(), cx)
6891 })
6892 .expect("moving first after second");
6893
6894 // check the state after moving
6895 project.update(cx, |project, cx| {
6896 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6897 assert_eq!(worktrees.len(), 3);
6898
6899 let first = worktrees[0].read(cx);
6900 let second = worktrees[1].read(cx);
6901 let third = worktrees[2].read(cx);
6902
6903 // check they are now in the right order
6904 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6905 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6906 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6907 });
6908
6909 // move the second worktree to before the first
6910 // [b, a, c] -> [a, b, c]
6911 project
6912 .update(cx, |project, cx| {
6913 let second = worktree_a.read(cx);
6914 let first = worktree_b.read(cx);
6915 project.move_worktree(first.id(), second.id(), cx)
6916 })
6917 .expect("moving second before first");
6918
6919 // check the state after moving
6920 project.update(cx, |project, cx| {
6921 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6922 assert_eq!(worktrees.len(), 3);
6923
6924 let first = worktrees[0].read(cx);
6925 let second = worktrees[1].read(cx);
6926 let third = worktrees[2].read(cx);
6927
6928 // check they are now in the right order
6929 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6930 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6931 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6932 });
6933
6934 // move the second worktree to after the third
6935 // [a, b, c] -> [a, c, b]
6936 project
6937 .update(cx, |project, cx| {
6938 let second = worktree_b.read(cx);
6939 let third = worktree_c.read(cx);
6940 project.move_worktree(second.id(), third.id(), cx)
6941 })
6942 .expect("moving second after third");
6943
6944 // check the state after moving
6945 project.update(cx, |project, cx| {
6946 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6947 assert_eq!(worktrees.len(), 3);
6948
6949 let first = worktrees[0].read(cx);
6950 let second = worktrees[1].read(cx);
6951 let third = worktrees[2].read(cx);
6952
6953 // check they are now in the right order
6954 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6955 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6956 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6957 });
6958
6959 // move the third worktree to before the second
6960 // [a, c, b] -> [a, b, c]
6961 project
6962 .update(cx, |project, cx| {
6963 let third = worktree_c.read(cx);
6964 let second = worktree_b.read(cx);
6965 project.move_worktree(third.id(), second.id(), cx)
6966 })
6967 .expect("moving third before second");
6968
6969 // check the state after moving
6970 project.update(cx, |project, cx| {
6971 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6972 assert_eq!(worktrees.len(), 3);
6973
6974 let first = worktrees[0].read(cx);
6975 let second = worktrees[1].read(cx);
6976 let third = worktrees[2].read(cx);
6977
6978 // check they are now in the right order
6979 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6980 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6981 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6982 });
6983
6984 // move the first worktree to after the third
6985 // [a, b, c] -> [b, c, a]
6986 project
6987 .update(cx, |project, cx| {
6988 let first = worktree_a.read(cx);
6989 let third = worktree_c.read(cx);
6990 project.move_worktree(first.id(), third.id(), cx)
6991 })
6992 .expect("moving first after third");
6993
6994 // check the state after moving
6995 project.update(cx, |project, cx| {
6996 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6997 assert_eq!(worktrees.len(), 3);
6998
6999 let first = worktrees[0].read(cx);
7000 let second = worktrees[1].read(cx);
7001 let third = worktrees[2].read(cx);
7002
7003 // check they are now in the right order
7004 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7005 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7006 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7007 });
7008
7009 // move the third worktree to before the first
7010 // [b, c, a] -> [a, b, c]
7011 project
7012 .update(cx, |project, cx| {
7013 let third = worktree_a.read(cx);
7014 let first = worktree_b.read(cx);
7015 project.move_worktree(third.id(), first.id(), cx)
7016 })
7017 .expect("moving third before first");
7018
7019 // check the state after moving
7020 project.update(cx, |project, cx| {
7021 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7022 assert_eq!(worktrees.len(), 3);
7023
7024 let first = worktrees[0].read(cx);
7025 let second = worktrees[1].read(cx);
7026 let third = worktrees[2].read(cx);
7027
7028 // check they are now in the right order
7029 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7030 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7031 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7032 });
7033}
7034
// Verifies that an unstaged diff (index vs. working copy) reflects the
// buffer's changes relative to the git index, and updates when the index
// contents change underneath it.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // The staged (index) version of the file: this is the diff's base text.
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // The on-disk working-copy contents that the buffer will be opened with.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // After the initial diff recalculation settles, the buffer differs from
    // the index by one added line and one modified line.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Rewrite the index so it now contains the comment line but not the
    // println. The diff's base text should follow the index.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // Once the index change is picked up, only the println remains as an
    // added hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7126
// Verifies an uncommitted diff (HEAD vs. working copy), including the
// secondary (staged/unstaged) status of hunks, reaction to HEAD changes,
// and the handling of a file that exists in HEAD but was deleted on disk.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD version: this is the uncommitted diff's base text.
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index version: has the println change staged, but not the comment.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working-copy version: comment added on top of the staged change.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // "deletion.rs" exists in HEAD and the index but not on disk, simulating
    // an unstaged deletion.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text buffer should pick up the language of the file.
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                // The comment line is not in the index: still unstaged.
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                // The println change is already in the index: no secondary hunk.
                (
                    2..3,
                    " println!(\"hello world\");\n",
                    " println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                " println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The whole file shows as one deleted hunk; the deletion is not yet
    // staged, so it still has a secondary hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // Once the index no longer contains the file, the deletion is staged and
    // the secondary hunk disappears.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7306
// Verifies the full lifecycle of staging hunks through an uncommitted diff:
// the optimistic "pending" secondary statuses, the events the diff emits,
// recovery when the git index write fails, and staging multiple hunks with
// separate operations.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; only the working copy differs,
    // producing three hunks: a deletion, and two modifications.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    // Subscribe to the diff's event stream so we can assert on the events
    // emitted by each staging operation.
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // Before the index write completes, the staged hunk is reported as
        // "removal pending".
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        // The optimistic "pending" state is shown even though the index
        // write is going to fail.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7646
// Verifies that staging operations remain consistent when filesystem events
// for earlier index writes are delivered late: hunks staged while a previous
// write's FS event is still buffered must all end up staged once events are
// flushed. The fixed seeds reproduce orderings that previously raced.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Same fixture as `test_staging_hunks`: a deletion plus two
    // modifications relative to HEAD and the index.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events
    fs.pause_events();

    // Stage the first hunk.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // With FS events paused, the hunk stays in the optimistic
        // "removal pending" state.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        // Both staged hunks are now pending.
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7840
// Randomized test: repeatedly stage/unstage random hunks with random delays
// (and optionally a deprioritized diff-recalculation task, to induce races
// between diff recalculation and index writes), then verify that every
// hunk's final secondary status matches what the operations predicted.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Number of random stage/unstage operations; override via `OPERATIONS`.
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // 30 lines, with every fifth line modified in the buffer — this yields
    // exactly 6 modification hunks (asserted below).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Randomly toggle hunks, tracking the expected pending status locally.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Yield a random number of times so operations interleave with the
        // background diff/index work.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything settles, each pending status should have resolved to
    // its corresponding final state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7963
7964#[gpui::test]
7965async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7966 init_test(cx);
7967
7968 let committed_contents = r#"
7969 fn main() {
7970 println!("hello from HEAD");
7971 }
7972 "#
7973 .unindent();
7974 let file_contents = r#"
7975 fn main() {
7976 println!("hello from the working copy");
7977 }
7978 "#
7979 .unindent();
7980
7981 let fs = FakeFs::new(cx.background_executor.clone());
7982 fs.insert_tree(
7983 "/dir",
7984 json!({
7985 ".git": {},
7986 "src": {
7987 "main.rs": file_contents,
7988 }
7989 }),
7990 )
7991 .await;
7992
7993 fs.set_head_for_repo(
7994 Path::new("/dir/.git"),
7995 &[("src/main.rs", committed_contents.clone())],
7996 "deadbeef",
7997 );
7998 fs.set_index_for_repo(
7999 Path::new("/dir/.git"),
8000 &[("src/main.rs", committed_contents.clone())],
8001 );
8002
8003 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8004
8005 let buffer = project
8006 .update(cx, |project, cx| {
8007 project.open_local_buffer("/dir/src/main.rs", cx)
8008 })
8009 .await
8010 .unwrap();
8011 let uncommitted_diff = project
8012 .update(cx, |project, cx| {
8013 project.open_uncommitted_diff(buffer.clone(), cx)
8014 })
8015 .await
8016 .unwrap();
8017
8018 cx.run_until_parked();
8019 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8020 let snapshot = buffer.read(cx).snapshot();
8021 assert_hunks(
8022 uncommitted_diff.hunks(&snapshot, cx),
8023 &snapshot,
8024 &uncommitted_diff.base_text_string().unwrap(),
8025 &[(
8026 1..2,
8027 " println!(\"hello from HEAD\");\n",
8028 " println!(\"hello from the working copy\");\n",
8029 DiffHunkStatus {
8030 kind: DiffHunkStatusKind::Modified,
8031 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8032 },
8033 )],
8034 );
8035 });
8036}
8037
// Verifies that project paths are mapped to the innermost containing git
// repository (including a nested repository under `deps/`), and that the
// mapping disappears when a repository's `.git` directory is removed.
#[gpui::test]
async fn test_repository_and_path_for_project_path(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);
    let fs = FakeFs::new(background_executor);
    // Layout: `c.txt` lives outside any repository; `dir1` is a repository
    // that itself contains a nested repository at `dir1/deps/dep1`.
    fs.insert_tree(
        path!("/root"),
        json!({
            "c.txt": "",
            "dir1": {
                ".git": {},
                "deps": {
                    "dep1": {
                        ".git": {},
                        "src": {
                            "a.txt": ""
                        }
                    }
                },
                "src": {
                    "b.txt": ""
                }
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.read_with(cx, |tree, _| tree.id());
    // Wait for repository discovery to finish before querying the mapping.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        // Each pair is (project-relative path, expected (repo work dir,
        // repo-relative path)); `None` means "not in any repository".
        let pairs = [
            ("c.txt", None),
            ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
            (
                "dir1/deps/dep1/src/a.txt",
                Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
            ),
        ];
        let expected = pairs
            .iter()
            .map(|(path, result)| {
                (
                    path,
                    result.map(|(repo, repo_path)| {
                        (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
                    }),
                )
            })
            .collect::<Vec<_>>();
        let actual = pairs
            .iter()
            .map(|(path, _)| {
                let project_path = (tree_id, rel_path(path)).into();
                let result = maybe!({
                    let (repo, repo_path) =
                        git_store.repository_and_path_for_project_path(&project_path, cx)?;
                    Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
                });
                (path, result)
            })
            .collect::<Vec<_>>();
        pretty_assertions::assert_eq!(expected, actual);
    });

    // Removing the outer repository's `.git` directory should make its files
    // no longer resolve to any repository.
    fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
        .await
        .unwrap();
    cx.run_until_parked();

    project.read_with(cx, |project, cx| {
        let git_store = project.git_store().read(cx);
        assert_eq!(
            git_store.repository_and_path_for_project_path(
                &(tree_id, rel_path("dir1/src/b.txt")).into(),
                cx
            ),
            None
        );
    });
}
8127
8128#[gpui::test]
8129async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8130 init_test(cx);
8131 let fs = FakeFs::new(cx.background_executor.clone());
8132 let home = paths::home_dir();
8133 fs.insert_tree(
8134 home,
8135 json!({
8136 ".git": {},
8137 "project": {
8138 "a.txt": "A"
8139 },
8140 }),
8141 )
8142 .await;
8143
8144 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8145 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8146 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8147
8148 project
8149 .update(cx, |project, cx| project.git_scans_complete(cx))
8150 .await;
8151 tree.flush_fs_events(cx).await;
8152
8153 project.read_with(cx, |project, cx| {
8154 let containing = project
8155 .git_store()
8156 .read(cx)
8157 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8158 assert!(containing.is_none());
8159 });
8160
8161 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8162 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8163 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8164 project
8165 .update(cx, |project, cx| project.git_scans_complete(cx))
8166 .await;
8167 tree.flush_fs_events(cx).await;
8168
8169 project.read_with(cx, |project, cx| {
8170 let containing = project
8171 .git_store()
8172 .read(cx)
8173 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8174 assert_eq!(
8175 containing
8176 .unwrap()
8177 .0
8178 .read(cx)
8179 .work_directory_abs_path
8180 .as_ref(),
8181 home,
8182 );
8183 });
8184}
8185
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    // End-to-end check against a real on-disk repository (RealFs): file
    // statuses (modified/untracked/deleted) are observed on startup and kept
    // current as the working copy, index, and HEAD change.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the "Deleted" and "Modified" states described above.
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a tracked, previously-unchanged file; it should now be reported.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit the modifications and the deletion; statuses should clear.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8315
8316#[gpui::test]
8317async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
8318 init_test(cx);
8319 cx.executor().allow_parking();
8320
8321 let root = TempTree::new(json!({
8322 "project": {
8323 "sub": {},
8324 "a.txt": "",
8325 },
8326 }));
8327
8328 let work_dir = root.path().join("project");
8329 let repo = git_init(work_dir.as_path());
8330 // a.txt exists in HEAD and the working copy but is deleted in the index.
8331 git_add("a.txt", &repo);
8332 git_commit("Initial commit", &repo);
8333 git_remove_index("a.txt".as_ref(), &repo);
8334 // `sub` is a nested git repository.
8335 let _sub = git_init(&work_dir.join("sub"));
8336
8337 let project = Project::test(
8338 Arc::new(RealFs::new(None, cx.executor())),
8339 [root.path()],
8340 cx,
8341 )
8342 .await;
8343
8344 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8345 tree.flush_fs_events(cx).await;
8346 project
8347 .update(cx, |project, cx| project.git_scans_complete(cx))
8348 .await;
8349 cx.executor().run_until_parked();
8350
8351 let repository = project.read_with(cx, |project, cx| {
8352 project
8353 .repositories(cx)
8354 .values()
8355 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
8356 .unwrap()
8357 .clone()
8358 });
8359
8360 repository.read_with(cx, |repository, _cx| {
8361 let entries = repository.cached_status().collect::<Vec<_>>();
8362
8363 // `sub` doesn't appear in our computed statuses.
8364 // a.txt appears with a combined `DA` status.
8365 assert_eq!(
8366 entries,
8367 [StatusEntry {
8368 repo_path: repo_path("a.txt"),
8369 status: TrackedStatus {
8370 index_status: StatusCode::Deleted,
8371 worktree_status: StatusCode::Added
8372 }
8373 .into(),
8374 }]
8375 )
8376 });
8377}
8378
#[gpui::test]
async fn test_repository_subfolder_git_status(
    executor: gpui::BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Opening a worktree rooted in a subfolder of a repository should still
    // locate the repository above the worktree root and resolve statuses for
    // paths inside the worktree.
    init_test(cx);

    let fs = FakeFs::new(executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "my-repo": {
                ".git": {},
                "a.txt": "a",
                "sub-folder-1": {
                    "sub-folder-2": {
                        "c.txt": "cc",
                        "d": {
                            "e.txt": "eee"
                        }
                    },
                }
            },
        }),
    )
    .await;

    // Paths are relative to the repository root, not the worktree root.
    const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
    const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";

    fs.set_status_for_repo(
        path!("/root/my-repo/.git").as_ref(),
        &[(E_TXT, FileStatus::Untracked)],
    );

    // Open only the nested sub-folder as the worktree root.
    let project = Project::test(
        fs.clone(),
        [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
        cx,
    )
    .await;

    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ensure that the git status is loaded correctly
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path,
            Path::new(path!("/root/my-repo")).into()
        );

        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked
        );
    });

    // Clearing the fake repo's statuses should propagate to the repository.
    fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
    });
}
8458
// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
// Disabled via `#[cfg(any())]` (an always-false cfg) until the flakiness is resolved.
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    // A conflicted cherry-pick should surface the conflicted path in
    // `merge_conflicts`, and completing the cherry-pick should clear it.
    init_test(cx);
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Create a commit on another branch that conflicts with main.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    // Cherry-picking the conflicting commit leaves CHERRY_PICK_HEAD behind.
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8541
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    // Rewriting .gitignore should flip which files are treated as ignored,
    // and staging a newly non-ignored file should give it an Added status.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    // Now a.xml is ignored and b.txt is staged (Added, ignore flag cleared).
    cx.executor().run_until_parked();
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8609
8610// NOTE:
8611// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8612// a directory which some program has already open.
// This is a limitation of Windows.
8614// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8615// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    // Renaming a repository's work directory on disk should update the
    // repository's recorded path while preserving its per-file statuses.
    init_test(cx);
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: repo rooted at project1, a modified, b untracked.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the whole work directory out from under the repository.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    // The repository follows the rename; statuses are unchanged.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8691
8692// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8693// you can't rename a directory which some program has already open. This is a
// limitation of Windows. See:
8695// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8696// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    // Broad status-tracking test against a real repository: startup state,
    // working-copy edits, commits, resets, stashes, ignore-rule changes, and
    // directory creation/renames must all be reflected in file statuses.
    init_test(cx);
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        // b.txt and f.txt were never added, so they start untracked.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        // a.txt and b.txt are now committed, so they have no status.
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        // a.txt was stashed (its modification disappeared from the worktree).
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        // b.txt was removed from the index, so it is untracked again.
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files and extend the ignore rules to cover f.txt.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // A brand-new nested directory with a file should show up as untracked.
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Renaming the parent directory should carry the untracked status over
    // to the file's new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8920
8921#[gpui::test]
8922#[ignore]
8923async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8924 init_test(cx);
8925 cx.executor().allow_parking();
8926
8927 const IGNORE_RULE: &str = "**/target";
8928
8929 let root = TempTree::new(json!({
8930 "project": {
8931 "src": {
8932 "main.rs": "fn main() {}"
8933 },
8934 "target": {
8935 "debug": {
8936 "important_text.txt": "important text",
8937 },
8938 },
8939 ".gitignore": IGNORE_RULE
8940 },
8941
8942 }));
8943 let root_path = root.path();
8944
8945 // Set up git repository before creating the worktree.
8946 let work_dir = root.path().join("project");
8947 let repo = git_init(work_dir.as_path());
8948 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8949 git_add("src/main.rs", &repo);
8950 git_add(".gitignore", &repo);
8951 git_commit("Initial commit", &repo);
8952
8953 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8954 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8955 let project_events = Arc::new(Mutex::new(Vec::new()));
8956 project.update(cx, |project, cx| {
8957 let repo_events = repository_updates.clone();
8958 cx.subscribe(project.git_store(), move |_, _, e, _| {
8959 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8960 repo_events.lock().push(e.clone());
8961 }
8962 })
8963 .detach();
8964 let project_events = project_events.clone();
8965 cx.subscribe_self(move |_, e, _| {
8966 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8967 project_events.lock().extend(
8968 updates
8969 .iter()
8970 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
8971 .filter(|(path, _)| path != "fs-event-sentinel"),
8972 );
8973 }
8974 })
8975 .detach();
8976 });
8977
8978 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8979 tree.flush_fs_events(cx).await;
8980 tree.update(cx, |tree, cx| {
8981 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
8982 })
8983 .await
8984 .unwrap();
8985 tree.update(cx, |tree, _| {
8986 assert_eq!(
8987 tree.entries(true, 0)
8988 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8989 .collect::<Vec<_>>(),
8990 vec![
8991 (rel_path(""), false),
8992 (rel_path("project/"), false),
8993 (rel_path("project/.gitignore"), false),
8994 (rel_path("project/src"), false),
8995 (rel_path("project/src/main.rs"), false),
8996 (rel_path("project/target"), true),
8997 (rel_path("project/target/debug"), true),
8998 (rel_path("project/target/debug/important_text.txt"), true),
8999 ]
9000 );
9001 });
9002
9003 assert_eq!(
9004 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9005 vec![
9006 RepositoryEvent::StatusesChanged { full_scan: true },
9007 RepositoryEvent::MergeHeadsChanged,
9008 ],
9009 "Initial worktree scan should produce a repo update event"
9010 );
9011 assert_eq!(
9012 project_events.lock().drain(..).collect::<Vec<_>>(),
9013 vec![
9014 ("project/target".to_string(), PathChange::Loaded),
9015 ("project/target/debug".to_string(), PathChange::Loaded),
9016 (
9017 "project/target/debug/important_text.txt".to_string(),
9018 PathChange::Loaded
9019 ),
9020 ],
9021 "Initial project changes should show that all not-ignored and all opened files are loaded"
9022 );
9023
9024 let deps_dir = work_dir.join("target").join("debug").join("deps");
9025 std::fs::create_dir_all(&deps_dir).unwrap();
9026 tree.flush_fs_events(cx).await;
9027 project
9028 .update(cx, |project, cx| project.git_scans_complete(cx))
9029 .await;
9030 cx.executor().run_until_parked();
9031 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9032 tree.flush_fs_events(cx).await;
9033 project
9034 .update(cx, |project, cx| project.git_scans_complete(cx))
9035 .await;
9036 cx.executor().run_until_parked();
9037 std::fs::remove_dir_all(&deps_dir).unwrap();
9038 tree.flush_fs_events(cx).await;
9039 project
9040 .update(cx, |project, cx| project.git_scans_complete(cx))
9041 .await;
9042 cx.executor().run_until_parked();
9043
9044 tree.update(cx, |tree, _| {
9045 assert_eq!(
9046 tree.entries(true, 0)
9047 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9048 .collect::<Vec<_>>(),
9049 vec![
9050 (rel_path(""), false),
9051 (rel_path("project/"), false),
9052 (rel_path("project/.gitignore"), false),
9053 (rel_path("project/src"), false),
9054 (rel_path("project/src/main.rs"), false),
9055 (rel_path("project/target"), true),
9056 (rel_path("project/target/debug"), true),
9057 (rel_path("project/target/debug/important_text.txt"), true),
9058 ],
9059 "No stray temp files should be left after the flycheck changes"
9060 );
9061 });
9062
9063 assert_eq!(
9064 repository_updates
9065 .lock()
9066 .iter()
9067 .cloned()
9068 .collect::<Vec<_>>(),
9069 Vec::new(),
9070 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9071 );
9072 assert_eq!(
9073 project_events.lock().as_slice(),
9074 vec![
9075 ("project/target/debug/deps".to_string(), PathChange::Added),
9076 ("project/target/debug/deps".to_string(), PathChange::Removed),
9077 ],
9078 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9079 No updates for more nested directories should happen as those are ignored",
9080 );
9081}
9082
9083#[gpui::test]
9084async fn test_odd_events_for_ignored_dirs(
9085 executor: BackgroundExecutor,
9086 cx: &mut gpui::TestAppContext,
9087) {
9088 init_test(cx);
9089 let fs = FakeFs::new(executor);
9090 fs.insert_tree(
9091 path!("/root"),
9092 json!({
9093 ".git": {},
9094 ".gitignore": "**/target/",
9095 "src": {
9096 "main.rs": "fn main() {}",
9097 },
9098 "target": {
9099 "debug": {
9100 "foo.txt": "foo",
9101 "deps": {}
9102 }
9103 }
9104 }),
9105 )
9106 .await;
9107 fs.set_head_and_index_for_repo(
9108 path!("/root/.git").as_ref(),
9109 &[
9110 (".gitignore", "**/target/".into()),
9111 ("src/main.rs", "fn main() {}".into()),
9112 ],
9113 );
9114
9115 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9116 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9117 let project_events = Arc::new(Mutex::new(Vec::new()));
9118 project.update(cx, |project, cx| {
9119 let repository_updates = repository_updates.clone();
9120 cx.subscribe(project.git_store(), move |_, _, e, _| {
9121 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9122 repository_updates.lock().push(e.clone());
9123 }
9124 })
9125 .detach();
9126 let project_events = project_events.clone();
9127 cx.subscribe_self(move |_, e, _| {
9128 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9129 project_events.lock().extend(
9130 updates
9131 .iter()
9132 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9133 .filter(|(path, _)| path != "fs-event-sentinel"),
9134 );
9135 }
9136 })
9137 .detach();
9138 });
9139
9140 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9141 tree.update(cx, |tree, cx| {
9142 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9143 })
9144 .await
9145 .unwrap();
9146 tree.flush_fs_events(cx).await;
9147 project
9148 .update(cx, |project, cx| project.git_scans_complete(cx))
9149 .await;
9150 cx.run_until_parked();
9151 tree.update(cx, |tree, _| {
9152 assert_eq!(
9153 tree.entries(true, 0)
9154 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9155 .collect::<Vec<_>>(),
9156 vec![
9157 (rel_path(""), false),
9158 (rel_path(".gitignore"), false),
9159 (rel_path("src"), false),
9160 (rel_path("src/main.rs"), false),
9161 (rel_path("target"), true),
9162 (rel_path("target/debug"), true),
9163 (rel_path("target/debug/deps"), true),
9164 (rel_path("target/debug/foo.txt"), true),
9165 ]
9166 );
9167 });
9168
9169 assert_eq!(
9170 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9171 vec![
9172 RepositoryEvent::MergeHeadsChanged,
9173 RepositoryEvent::BranchChanged
9174 ],
9175 "Initial worktree scan should produce a repo update event"
9176 );
9177 assert_eq!(
9178 project_events.lock().drain(..).collect::<Vec<_>>(),
9179 vec![
9180 ("target".to_string(), PathChange::Loaded),
9181 ("target/debug".to_string(), PathChange::Loaded),
9182 ("target/debug/deps".to_string(), PathChange::Loaded),
9183 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9184 ],
9185 "All non-ignored entries and all opened firs should be getting a project event",
9186 );
9187
9188 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9189 // This may happen multiple times during a single flycheck, but once is enough for testing.
9190 fs.emit_fs_event("/root/target/debug/deps", None);
9191 tree.flush_fs_events(cx).await;
9192 project
9193 .update(cx, |project, cx| project.git_scans_complete(cx))
9194 .await;
9195 cx.executor().run_until_parked();
9196
9197 assert_eq!(
9198 repository_updates
9199 .lock()
9200 .iter()
9201 .cloned()
9202 .collect::<Vec<_>>(),
9203 Vec::new(),
9204 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9205 );
9206 assert_eq!(
9207 project_events.lock().as_slice(),
9208 Vec::new(),
9209 "No further project events should happen, as only ignored dirs received FS events",
9210 );
9211}
9212
#[gpui::test]
async fn test_repos_in_invisible_worktrees(
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // Verifies that adding an *invisible* (non-visible) worktree does not cause
    // repositories discovered through it to appear in `Project::repositories`.
    init_test(cx);
    let fs = FakeFs::new(executor);
    // Two nested repos: /root/dir1 and /root/dir1/dep1; only dep1 is opened as
    // the project's visible worktree.
    fs.insert_tree(
        path!("/root"),
        json!({
            "dir1": {
                ".git": {},
                "dep1": {
                    ".git": {},
                    "src": {
                        "a.txt": "",
                    },
                },
                "b.txt": "",
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
    let _visible_worktree =
        project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    // Wait for the initial git scan so repository discovery has settled.
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // Only the repo containing the visible worktree should be reported.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);

    // Add a single-file worktree (visible = false) pointing inside the outer repo.
    let (_invisible_worktree, _) = project
        .update(cx, |project, cx| {
            project.worktree_store.update(cx, |worktree_store, cx| {
                worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
            })
        })
        .await
        .expect("failed to create worktree");
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;

    // The outer /root/dir1 repo must still not be listed: invisible worktrees
    // should not contribute repositories.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
}
9274
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    // Verifies that git status and is_ignored state stay correct as files are
    // created in tracked, ancestor-ignored, and repo-ignored locations.
    init_test(cx);
    // Clear file-scan exclusions so ignored directories are still scannable.
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    // The outer /root/.gitignore is an *ancestor* ignore file (outside the repo
    // at /root/tree); the inner one ignores "ignored-dir" within the repo.
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Force the ignored directory's entries to be loaded; ignored dirs are not
    // scanned eagerly.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial state: tracked file is clean (no index status), ancestor-ignored
    // file inside the repo is NOT ignored (outer .gitignore is outside the
    // repo), and files under ignored-dir are ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a new tracked file and stage it in the index.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    // Create new files in the ancestor-ignored and repo-ignored locations.
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    cx.read(|cx| {
        // The staged file shows up as Added in the index.
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // The .git directory itself is always treated as ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9415
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    // Verifies that linked git worktrees (gitdir files pointing into
    // .git/worktrees/*) and submodules (gitdir files pointing into
    // .git/modules/*) are each discovered as distinct repositories, and that
    // git-state changes in them are picked up.
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                // A linked worktree: `.git` is a file, not a directory.
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // Expect three distinct repositories: the main repo, the linked worktree,
    // and the submodule.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repo, not the main one.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    // Barrier ensures pending repository work has flushed before asserting.
    barrier.await.unwrap();
    worktree_repo.update(cx, |repo, _| {
        // HEAD/index say "b" while the file on disk says "B" -> modified.
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9571
#[gpui::test]
async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
    // Two worktrees rooted at sibling subdirectories of the same git repo
    // should be deduplicated into a single repository entry.
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            "project": {
                ".git": {},
                "child1": {
                    "a.txt": "A",
                },
                "child2": {
                    "b.txt": "B",
                }
            }
        }),
    )
    .await;

    // Open both children of the repo as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project/child1").as_ref(),
            path!("/root/project/child2").as_ref(),
        ],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Both worktrees belong to /root/project's repo, reported exactly once.
    let repos = project.read_with(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
}
9618
/// Runs `query` against `project` and collects the results into a map from
/// each buffer's full path to the byte-offset ranges of its matches.
///
/// Drains the project's search channel until it closes. If the same buffer is
/// reported more than once, the first set of ranges wins (`or_insert`).
/// `SearchResult::LimitReached` markers are ignored.
async fn search(
    project: &Entity<Project>,
    query: SearchQuery,
    cx: &mut gpui::TestAppContext,
) -> Result<HashMap<String, Vec<Range<usize>>>> {
    let search_rx = project.update(cx, |project, cx| project.search(query, cx));
    let mut results = HashMap::default();
    while let Ok(search_result) = search_rx.recv().await {
        match search_result {
            SearchResult::Buffer { buffer, ranges } => {
                results.entry(buffer).or_insert(ranges);
            }
            SearchResult::LimitReached => {}
        }
    }
    // Resolve buffers to display paths and anchors to concrete byte offsets.
    Ok(results
        .into_iter()
        .map(|(buffer, ranges)| {
            buffer.update(cx, |buffer, cx| {
                let path = buffer
                    .file()
                    .unwrap()
                    .full_path(cx)
                    .to_string_lossy()
                    .to_string();
                let ranges = ranges
                    .into_iter()
                    .map(|range| range.to_offset(buffer))
                    .collect::<Vec<_>>();
                (path, ranges)
            })
        })
        .collect())
}
9653
/// Shared test setup: installs logging, a test `SettingsStore` global, and the
/// release-channel, language, and project settings registrations that most
/// tests in this file depend on. Call this first in every test.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        // The settings global must exist before the init calls below read it.
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9665
9666fn json_lang() -> Arc<Language> {
9667 Arc::new(Language::new(
9668 LanguageConfig {
9669 name: "JSON".into(),
9670 matcher: LanguageMatcher {
9671 path_suffixes: vec!["json".to_string()],
9672 ..Default::default()
9673 },
9674 ..Default::default()
9675 },
9676 None,
9677 ))
9678}
9679
9680fn js_lang() -> Arc<Language> {
9681 Arc::new(Language::new(
9682 LanguageConfig {
9683 name: "JavaScript".into(),
9684 matcher: LanguageMatcher {
9685 path_suffixes: vec!["js".to_string()],
9686 ..Default::default()
9687 },
9688 ..Default::default()
9689 },
9690 None,
9691 ))
9692}
9693
9694fn rust_lang() -> Arc<Language> {
9695 Arc::new(Language::new(
9696 LanguageConfig {
9697 name: "Rust".into(),
9698 matcher: LanguageMatcher {
9699 path_suffixes: vec!["rs".to_string()],
9700 ..Default::default()
9701 },
9702 ..Default::default()
9703 },
9704 Some(tree_sitter_rust::LANGUAGE.into()),
9705 ))
9706}
9707
/// Builds a "Python" language (no grammar) wired to a fake toolchain lister
/// that reports a venv toolchain for every `.venv` directory found in the
/// ancestors of the queried subroot path, using `fs` to check for existence.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Test-only toolchain lister backed by the fake filesystem.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // Report a toolchain for each `.venv` directory found in any
            // ancestor of the subroot path (including the subroot itself).
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is intentionally unsupported in this fake.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No activation commands are needed in tests.
        fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9777
9778fn typescript_lang() -> Arc<Language> {
9779 Arc::new(Language::new(
9780 LanguageConfig {
9781 name: "TypeScript".into(),
9782 matcher: LanguageMatcher {
9783 path_suffixes: vec!["ts".to_string()],
9784 ..Default::default()
9785 },
9786 ..Default::default()
9787 },
9788 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9789 ))
9790}
9791
9792fn tsx_lang() -> Arc<Language> {
9793 Arc::new(Language::new(
9794 LanguageConfig {
9795 name: "tsx".into(),
9796 matcher: LanguageMatcher {
9797 path_suffixes: vec!["tsx".to_string()],
9798 ..Default::default()
9799 },
9800 ..Default::default()
9801 },
9802 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9803 ))
9804}
9805
/// Resolves all tasks known to `project`'s task inventory for the given
/// contexts, returning previously-used tasks followed by the currently
/// available ones in a single list.
fn get_all_tasks(
    project: &Entity<Project>,
    task_contexts: Arc<TaskContexts>,
    cx: &mut App,
) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
    let new_tasks = project.update(cx, |project, cx| {
        project.task_store.update(cx, |task_store, cx| {
            task_store.task_inventory().unwrap().update(cx, |this, cx| {
                this.used_and_current_resolved_tasks(task_contexts, cx)
            })
        })
    });

    // Concatenate: used tasks first, then the freshly resolved ones.
    cx.background_spawn(async move {
        let (mut old, new) = new_tasks.await;
        old.extend(new);
        old
    })
}
9825
/// Asserts that the worktree entry at `path` has the expected git index status
/// and ignored flag.
///
/// `index_status == None` means "no status reported" (clean/untracked);
/// `Some(code)` means the index shows `code` with an unmodified worktree.
/// Panics if the entry does not exist or the tree/repo roots disagree.
#[track_caller]
fn assert_entry_git_state(
    tree: &Worktree,
    repository: &Repository,
    path: &str,
    index_status: Option<StatusCode>,
    is_ignored: bool,
) {
    // Sanity check: this helper assumes the repo's work dir IS the worktree root.
    assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
    let entry = tree
        .entry_for_path(&rel_path(path))
        .unwrap_or_else(|| panic!("entry {path} not found"));
    let status = repository
        .status_for_path(&repo_path(path))
        .map(|entry| entry.status);
    let expected = index_status.map(|index_status| {
        TrackedStatus {
            index_status,
            worktree_status: StatusCode::Unmodified,
        }
        .into()
    });
    assert_eq!(
        status, expected,
        "expected {path} to have git status: {expected:?}"
    );
    assert_eq!(
        entry.is_ignored, is_ignored,
        "expected {path} to have is_ignored: {is_ignored}"
    );
}
9857
9858#[track_caller]
9859fn git_init(path: &Path) -> git2::Repository {
9860 let mut init_opts = RepositoryInitOptions::new();
9861 init_opts.initial_head("main");
9862 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9863}
9864
9865#[track_caller]
9866fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9867 let path = path.as_ref();
9868 let mut index = repo.index().expect("Failed to get index");
9869 index.add_path(path).expect("Failed to add file");
9870 index.write().expect("Failed to write index");
9871}
9872
9873#[track_caller]
9874fn git_remove_index(path: &Path, repo: &git2::Repository) {
9875 let mut index = repo.index().expect("Failed to get index");
9876 index.remove_path(path).expect("Failed to add file");
9877 index.write().expect("Failed to write index");
9878}
9879
/// Commits the current index to HEAD with message `msg`, using a fixed test
/// signature. Handles both the first commit (no HEAD yet, zero parents) and
/// subsequent commits (HEAD's commit as the single parent).
#[track_caller]
fn git_commit(msg: &'static str, repo: &git2::Repository) {
    use git2::Signature;

    let signature = Signature::now("test", "test@zed.dev").unwrap();
    // Write the current index as a tree to commit.
    let oid = repo.index().unwrap().write_tree().unwrap();
    let tree = repo.find_tree(oid).unwrap();
    if let Ok(head) = repo.head() {
        // HEAD exists: commit with it as the sole parent.
        let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();

        let parent_commit = parent_obj.as_commit().unwrap();

        repo.commit(
            Some("HEAD"),
            &signature,
            &signature,
            msg,
            &tree,
            &[parent_commit],
        )
        .expect("Failed to commit with parent");
    } else {
        // Initial commit: no parents.
        repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
            .expect("Failed to commit");
    }
}
9906
// Currently unused: `#[cfg(any())]` never matches, so this is compiled out.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9912
9913#[track_caller]
9914fn git_stash(repo: &mut git2::Repository) {
9915 use git2::Signature;
9916
9917 let signature = Signature::now("test", "test@zed.dev").unwrap();
9918 repo.stash_save(&signature, "N/A", None)
9919 .expect("Failed to stash");
9920}
9921
9922#[track_caller]
9923fn git_reset(offset: usize, repo: &git2::Repository) {
9924 let head = repo.head().expect("Couldn't get repo head");
9925 let object = head.peel(git2::ObjectType::Commit).unwrap();
9926 let commit = object.as_commit().unwrap();
9927 let new_head = commit
9928 .parents()
9929 .inspect(|parnet| {
9930 parnet.message();
9931 })
9932 .nth(offset)
9933 .expect("Not enough history");
9934 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9935 .expect("Could not reset");
9936}
9937
// Currently unused: `#[cfg(any())]` never matches, so this is compiled out.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    repo.branch(name, &head, false).expect("Failed to commit");
}
9948
// Currently unused: `#[cfg(any())]` never matches, so this is compiled out.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9955
// Currently unused: `#[cfg(any())]` never matches, so this is compiled out.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|status| (status.path().unwrap().to_string(), status.status()))
        .collect()
}
9965
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Open two sibling directories as separate worktrees.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Absolute path at a worktree root resolves to that worktree.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        // Nested paths resolve with the full relative path.
        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        // Paths under the second worktree resolve to its id.
        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // A nonexistent file inside a worktree still yields a project path.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}