1#![allow(clippy::format_collect)]
2
3use crate::{
4 Event,
5 git_store::{GitStoreEvent, RepositoryEvent, StatusEntry},
6 task_inventory::TaskContexts,
7 task_store::TaskSettingsLocation,
8 *,
9};
10use async_trait::async_trait;
11use buffer_diff::{
12 BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
13 DiffHunkStatusKind, assert_hunks,
14};
15use fs::FakeFs;
16use futures::{StreamExt, future};
17use git::{
18 GitHostingProviderRegistry,
19 repository::{RepoPath, repo_path},
20 status::{StatusCode, TrackedStatus},
21};
22use git2::RepositoryInitOptions;
23use gpui::{App, BackgroundExecutor, SemanticVersion, UpdateGlobal};
24use itertools::Itertools;
25use language::{
26 Diagnostic, DiagnosticEntry, DiagnosticEntryRef, DiagnosticSet, DiagnosticSourceKind,
27 DiskState, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageName, LineEnding,
28 ManifestName, ManifestProvider, ManifestQuery, OffsetRangeExt, Point, ToPoint, ToolchainList,
29 ToolchainLister,
30 language_settings::{LanguageSettingsContent, language_settings},
31 tree_sitter_rust, tree_sitter_typescript,
32};
33use lsp::{
34 DiagnosticSeverity, DocumentChanges, FileOperationFilter, NumberOrString, TextDocumentEdit,
35 Uri, WillRenameFiles, notification::DidRenameFiles,
36};
37use parking_lot::Mutex;
38use paths::{config_dir, global_gitignore_path, tasks_file};
39use postage::stream::Stream as _;
40use pretty_assertions::{assert_eq, assert_matches};
41use rand::{Rng as _, rngs::StdRng};
42use serde_json::json;
43#[cfg(not(windows))]
44use std::os;
45use std::{
46 env, mem,
47 num::NonZeroU32,
48 ops::Range,
49 str::FromStr,
50 sync::{Arc, OnceLock},
51 task::Poll,
52};
53use task::{ResolvedTask, ShellKind, TaskContext};
54use unindent::Unindent as _;
55use util::{
56 TryFutureExt as _, assert_set_eq, maybe, path,
57 paths::PathMatcher,
58 rel_path::rel_path,
59 test::{TempTree, marked_text_offsets},
60 uri,
61};
62use worktree::WorktreeModelHandle as _;
63
64#[gpui::test]
65async fn test_block_via_channel(cx: &mut gpui::TestAppContext) {
66 cx.executor().allow_parking();
67
68 let (tx, mut rx) = futures::channel::mpsc::unbounded();
69 let _thread = std::thread::spawn(move || {
70 #[cfg(not(target_os = "windows"))]
71 std::fs::metadata("/tmp").unwrap();
72 #[cfg(target_os = "windows")]
73 std::fs::metadata("C:/Windows").unwrap();
74 std::thread::sleep(Duration::from_millis(1000));
75 tx.unbounded_send(1).unwrap();
76 });
77 rx.next().await.unwrap();
78}
79
80#[gpui::test]
81async fn test_block_via_smol(cx: &mut gpui::TestAppContext) {
82 cx.executor().allow_parking();
83
84 let io_task = smol::unblock(move || {
85 println!("sleeping on thread {:?}", std::thread::current().id());
86 std::thread::sleep(Duration::from_millis(10));
87 1
88 });
89
90 let task = cx.foreground_executor().spawn(async move {
91 io_task.await;
92 });
93
94 task.await;
95}
96
// NOTE:
// While POSIX symbolic links are somewhat supported on Windows, they are opt-in for the user, and thus
// we assume that they are not supported out of the box.
#[cfg(not(windows))]
#[gpui::test]
async fn test_symlinks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    cx.executor().allow_parking();

    // Real on-disk tree (uses RealFs below, so TempTree rather than FakeFs):
    // "root" contains files and nested directories that we symlink into next.
    let dir = TempTree::new(json!({
        "root": {
            "apple": "",
            "banana": {
                "carrot": {
                    "date": "",
                    "endive": "",
                }
            },
            "fennel": {
                "grape": "",
            }
        }
    }));

    // `root_link` points at the whole tree from outside it, and
    // `root/finnochio` points at `root/fennel` from inside it.
    let root_link_path = dir.path().join("root_link");
    os::unix::fs::symlink(dir.path().join("root"), &root_link_path).unwrap();
    os::unix::fs::symlink(
        dir.path().join("root/fennel"),
        dir.path().join("root/finnochio"),
    )
    .unwrap();

    // Open the project through the symlinked root, on the real filesystem.
    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root_link_path.as_ref()],
        cx,
    )
    .await;

    project.update(cx, |project, cx| {
        let tree = project.worktrees(cx).next().unwrap().read(cx);
        // 5 files: apple, date, endive, grape, and (presumably) grape seen
        // again through the finnochio symlink — TODO confirm against worktree
        // symlink-traversal rules.
        assert_eq!(tree.file_count(), 5);
        // The same file reached via the directory symlink must resolve to the
        // same underlying inode.
        assert_eq!(
            tree.entry_for_path(rel_path("fennel/grape")).unwrap().inode,
            tree.entry_for_path(rel_path("finnochio/grape"))
                .unwrap()
                .inode
        );
    });
}
147
#[gpui::test]
async fn test_editorconfig_support(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Fixture: a root `.editorconfig` (root = true) with per-glob sections,
    // a nested `.editorconfig` in `b/` that overrides it, and Zed project
    // settings in `.zed/settings.json` acting as the fallback layer.
    let dir = TempTree::new(json!({
        ".editorconfig": r#"
        root = true
        [*.rs]
        indent_style = tab
        indent_size = 3
        end_of_line = lf
        insert_final_newline = true
        trim_trailing_whitespace = true
        max_line_length = 120
        [*.js]
        tab_width = 10
        max_line_length = off
        "#,
        ".zed": {
            "settings.json": r#"{
                "tab_size": 8,
                "hard_tabs": false,
                "ensure_final_newline_on_save": false,
                "remove_trailing_whitespace_on_save": false,
                "preferred_line_length": 64,
                "soft_wrap": "editor_width",
            }"#,
        },
        "a.rs": "fn a() {\n A\n}",
        "b": {
            ".editorconfig": r#"
            [*.rs]
            indent_size = 2
            max_line_length = off,
            "#,
            "b.rs": "fn b() {\n B\n}",
        },
        "c.js": "def c\n C\nend",
        "README.json": "tabs are better\n",
    }));

    // Mirror the real temp tree into a FakeFs so the project can watch it.
    let path = dir.path();
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree_from_real_fs(path, path).await;
    let project = Project::test(fs, [path], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(js_lang());
    language_registry.add(json_lang());
    language_registry.add(rust_lang());

    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();

    cx.update(|cx| {
        let tree = worktree.read(cx);
        // Resolve the effective language settings for a file in the worktree,
        // loading its language from the registry by file path.
        let settings_for = |path: &str| {
            let file_entry = tree.entry_for_path(rel_path(path)).unwrap().clone();
            let file = File::for_entry(file_entry, worktree.clone());
            let file_language = project
                .read(cx)
                .languages()
                .load_language_for_file_path(file.path.as_std_path());
            let file_language = cx
                .background_executor()
                .block(file_language)
                .expect("Failed to get file language");
            let file = file as _;
            language_settings(Some(file_language.name()), Some(&file), cx).into_owned()
        };

        let settings_a = settings_for("a.rs");
        let settings_b = settings_for("b/b.rs");
        let settings_c = settings_for("c.js");
        let settings_readme = settings_for("README.json");

        // .editorconfig overrides .zed/settings
        assert_eq!(Some(settings_a.tab_size), NonZeroU32::new(3));
        assert_eq!(settings_a.hard_tabs, true);
        assert_eq!(settings_a.ensure_final_newline_on_save, true);
        assert_eq!(settings_a.remove_trailing_whitespace_on_save, true);
        assert_eq!(settings_a.preferred_line_length, 120);

        // .editorconfig in b/ overrides .editorconfig in root
        assert_eq!(Some(settings_b.tab_size), NonZeroU32::new(2));

        // "indent_size" is not set, so "tab_width" is used
        assert_eq!(Some(settings_c.tab_size), NonZeroU32::new(10));

        // When max_line_length is "off", default to .zed/settings.json
        assert_eq!(settings_b.preferred_line_length, 64);
        assert_eq!(settings_c.preferred_line_length, 64);

        // README.json should not be affected by .editorconfig's glob "*.rs"
        assert_eq!(Some(settings_readme.tab_size), NonZeroU32::new(8));
    });
}
246
#[gpui::test]
async fn test_git_provider_project_setting(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Install the global git-hosting-provider registry before the project
    // loads settings that register a custom provider into it.
    cx.update(|cx| {
        GitHostingProviderRegistry::default_global(cx);
        git_hosting_providers::init(cx);
    });

    let fs = FakeFs::new(cx.executor());
    let str_path = path!("/dir");
    let path = Path::new(str_path);

    // Project settings declare a custom GitLab-flavored provider named "foo".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{
                    "git_hosting_providers": [
                        {
                            "provider": "gitlab",
                            "base_url": "https://google.com",
                            "name": "foo"
                        }
                    ]
                }"#
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let (_worktree, _) =
        project.read_with(cx, |project, cx| project.find_worktree(path, cx).unwrap());
    cx.executor().run_until_parked();

    // After settings load, the provider from the project settings is
    // registered in the global registry.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });

    // Overwrite the settings file with an empty object to remove the
    // provider declaration.
    fs.atomic_write(
        Path::new(path!("/dir/.zed/settings.json")).to_owned(),
        "{}".into(),
    )
    .await
    .unwrap();

    cx.run_until_parked();

    // The settings change propagates: "foo" is unregistered again.
    cx.update(|cx| {
        let provider = GitHostingProviderRegistry::global(cx);
        assert!(
            !provider
                .list_hosting_providers()
                .into_iter()
                .any(|provider| provider.name() == "foo")
        );
    });
}
311
#[gpui::test]
async fn test_managing_project_specific_settings(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // Fixture: worktree-level settings/tasks in `.zed/`, plus a nested
    // directory `b/` with its own `.zed/` overrides. Note the fixture's
    // tasks.json intentionally contains a trailing comma to exercise
    // lenient JSON parsing.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "settings.json": r#"{ "tab_size": 8 }"#,
                "tasks.json": r#"[{
                    "label": "cargo check all",
                    "command": "cargo",
                    "args": ["check", "--all"]
                },]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
            "b": {
                ".zed": {
                    "settings.json": r#"{ "tab_size": 2 }"#,
                    "tasks.json": r#"[{
                        "label": "cargo check",
                        "command": "cargo",
                        "args": ["check"]
                    },]"#,
                },
                "b.rs": "fn b() {\n B\n}"
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // Resolve tasks against a default context for the active worktree.
    let mut task_contexts = TaskContexts::default();
    task_contexts.active_worktree_context = Some((worktree_id, TaskContext::default()));
    let task_contexts = Arc::new(task_contexts);

    // Identity of the worktree-root `.zed` task source, reused in assertions.
    let topmost_local_task_source_kind = TaskSourceKind::Worktree {
        id: worktree_id,
        directory_in_worktree: rel_path(".zed").into(),
        id_base: "local worktree tasks from directory \".zed\"".into(),
    };

    let all_tasks = cx
        .update(|cx| {
            let tree = worktree.read(cx);

            // Settings resolve per-directory: `a/a.rs` sees the root
            // settings, `b/b.rs` sees the nested override.
            let file_a = File::for_entry(
                tree.entry_for_path(rel_path("a/a.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_a = language_settings(None, Some(&file_a), cx);
            let file_b = File::for_entry(
                tree.entry_for_path(rel_path("b/b.rs")).unwrap().clone(),
                worktree.clone(),
            ) as _;
            let settings_b = language_settings(None, Some(&file_b), cx);

            assert_eq!(settings_a.tab_size.get(), 8);
            assert_eq!(settings_b.tab_size.get(), 2);

            get_all_tasks(&project, task_contexts.clone(), cx)
        })
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // Both tasks resolve; the nested `b/.zed` task sorts first here.
    assert_eq!(
        all_tasks,
        vec![
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
        ]
    );

    // Mark the top-level task as most recently scheduled, and add a global
    // (user-level) tasks.json entry to the inventory.
    let (_, resolved_task) = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .find(|(source_kind, _)| source_kind == &topmost_local_task_source_kind)
        .expect("should have one global task");
    project.update(cx, |project, cx| {
        let task_inventory = project
            .task_store
            .read(cx)
            .task_inventory()
            .cloned()
            .unwrap();
        task_inventory.update(cx, |inventory, _| {
            inventory.task_scheduled(topmost_local_task_source_kind.clone(), resolved_task);
            inventory
                .update_file_based_tasks(
                    TaskSettingsLocation::Global(tasks_file()),
                    Some(
                        &json!([{
                            "label": "cargo check unstable",
                            "command": "cargo",
                            "args": [
                                "check",
                                "--all",
                                "--all-targets"
                            ],
                            "env": {
                                "RUSTFLAGS": "-Zunstable-options"
                            }
                        }])
                        .to_string(),
                    ),
                )
                .unwrap();
        });
    });
    cx.run_until_parked();

    let all_tasks = cx
        .update(|cx| get_all_tasks(&project, task_contexts.clone(), cx))
        .await
        .into_iter()
        .map(|(source_kind, task)| {
            let resolved = task.resolved;
            (
                source_kind,
                task.resolved_label,
                resolved.args,
                resolved.env,
            )
        })
        .collect::<Vec<_>>();
    // The recently-scheduled task now sorts first, followed by the other
    // worktree task, then the newly-added global task with its env.
    assert_eq!(
        all_tasks,
        vec![
            (
                topmost_local_task_source_kind.clone(),
                "cargo check all".to_string(),
                vec!["check".to_string(), "--all".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::Worktree {
                    id: worktree_id,
                    directory_in_worktree: rel_path("b/.zed").into(),
                    id_base: "local worktree tasks from directory \"b/.zed\"".into()
                },
                "cargo check".to_string(),
                vec!["check".to_string()],
                HashMap::default(),
            ),
            (
                TaskSourceKind::AbsPath {
                    abs_path: paths::tasks_file().clone(),
                    id_base: "global tasks.json".into(),
                },
                "cargo check unstable".to_string(),
                vec![
                    "check".to_string(),
                    "--all".to_string(),
                    "--all-targets".to_string(),
                ],
                HashMap::from_iter(Some((
                    "RUSTFLAGS".to_string(),
                    "-Zunstable-options".to_string()
                ))),
            ),
        ]
    );
}
512
#[gpui::test]
async fn test_fallback_to_single_worktree_tasks(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    TaskStore::init(None);

    // One worktree task whose command depends on the ZED_WORKTREE_ROOT
    // variable, so it can only resolve when a worktree context provides it.
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".zed": {
                "tasks.json": r#"[{
                    "label": "test worktree root",
                    "command": "echo $ZED_WORKTREE_ROOT"
                }]"#,
            },
            "a": {
                "a.rs": "fn a() {\n A\n}"
            },
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let _worktree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());

    cx.executor().run_until_parked();
    let worktree_id = cx.update(|cx| {
        project.update(cx, |project, cx| {
            project.worktrees(cx).next().unwrap().read(cx).id()
        })
    });

    // With only an active-item context (no worktree context), the variable
    // is unavailable and the task cannot resolve.
    let active_non_worktree_item_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: None,
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert!(
        active_non_worktree_item_tasks.is_empty(),
        "A task can not be resolved with context with no ZED_WORKTREE_ROOT data"
    );

    // With a worktree context that supplies WorktreeRoot = "/dir", the task
    // resolves and the variable is substituted into the command.
    let active_worktree_tasks = cx
        .update(|cx| {
            get_all_tasks(
                &project,
                Arc::new(TaskContexts {
                    active_item_context: Some((Some(worktree_id), None, TaskContext::default())),
                    active_worktree_context: Some((worktree_id, {
                        let mut worktree_context = TaskContext::default();
                        worktree_context
                            .task_variables
                            .insert(task::VariableName::WorktreeRoot, "/dir".to_string());
                        worktree_context
                    })),
                    other_worktree_contexts: Vec::new(),
                    lsp_task_sources: HashMap::default(),
                    latest_selection: None,
                }),
                cx,
            )
        })
        .await;
    assert_eq!(
        active_worktree_tasks
            .into_iter()
            .map(|(source_kind, task)| {
                let resolved = task.resolved;
                (source_kind, resolved.command.unwrap())
            })
            .collect::<Vec<_>>(),
        vec![(
            TaskSourceKind::Worktree {
                id: worktree_id,
                directory_in_worktree: rel_path(".zed").into(),
                id_base: "local worktree tasks from directory \".zed\"".into(),
            },
            "echo /dir".to_string(),
        )]
    );
}
604
#[gpui::test]
async fn test_running_multiple_instances_of_a_single_server_in_one_worktree(
    cx: &mut gpui::TestAppContext,
) {
    // Minimal manifest provider: a project "root" is the nearest ancestor
    // directory containing a pyproject.toml.
    pub(crate) struct PyprojectTomlManifestProvider;

    impl ManifestProvider for PyprojectTomlManifestProvider {
        fn name(&self) -> ManifestName {
            SharedString::new_static("pyproject.toml").into()
        }

        // Walk up from `path` (at most `depth` ancestors) and return the
        // first directory where pyproject.toml exists.
        fn search(
            &self,
            ManifestQuery {
                path,
                depth,
                delegate,
            }: ManifestQuery,
        ) -> Option<Arc<RelPath>> {
            for path in path.ancestors().take(depth) {
                let p = path.join(rel_path("pyproject.toml"));
                if delegate.exists(&p, Some(false)) {
                    return Some(path.into());
                }
            }

            None
        }
    }

    init_test(cx);
    let fs = FakeFs::new(cx.executor());

    // Two Python sub-projects under one worktree, each with its own
    // pyproject.toml (and an empty .venv dir); Python is configured to use
    // the "ty" language server.
    fs.insert_tree(
        path!("/the-root"),
        json!({
            ".zed": {
                "settings.json": r#"
                {
                    "languages": {
                        "Python": {
                            "language_servers": ["ty"]
                        }
                    }
                }"#
            },
            "project-a": {
                ".venv": {},
                "file.py": "",
                "pyproject.toml": ""
            },
            "project-b": {
                ".venv": {},
                "source_file.py": "",
                "another_file.py": "",
                "pyproject.toml": ""
            }
        }),
    )
    .await;
    cx.update(|cx| {
        ManifestProvidersStore::global(cx).register(Arc::new(PyprojectTomlManifestProvider))
    });

    let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let _fake_python_server = language_registry.register_fake_lsp(
        "Python",
        FakeLspAdapter {
            name: "ty",
            capabilities: lsp::ServerCapabilities {
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Opening a buffer in project-a starts the first "ty" instance.
    language_registry.add(python_lang(fs.clone()));
    let (first_buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-a/file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            first_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    assert_eq!(server.server_id(), LanguageServerId(0));
    // `workspace_folders` are set to the rooting point.
    assert_eq!(
        server.workspace_folders(),
        BTreeSet::from_iter(
            [Uri::from_file_path(path!("/the-root/project-a")).unwrap()].into_iter()
        )
    );

    // Opening a buffer in project-b reuses the same server instance…
    let (second_project_buffer, _other_handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/the-root/project-b/source_file.py"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // We're not using venvs at all here, so both folders should fall under the same root.
    assert_eq!(server.server_id(), LanguageServerId(0));
    // Now, let's select a different toolchain for one of subprojects.

    // Discover the toolchains available for project-b; the manifest provider
    // roots the query at "project-b".
    let Toolchains {
        toolchains: available_toolchains_for_b,
        root_path,
        ..
    } = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.available_toolchains(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await
        .expect("A toolchain to be discovered");
    assert_eq!(root_path.as_ref(), rel_path("project-b"));
    assert_eq!(available_toolchains_for_b.toolchains().len(), 1);
    // Nothing has been activated yet, so there is no active toolchain.
    let currently_active_toolchain = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.active_toolchain(
                ProjectPath {
                    worktree_id,
                    path: rel_path("project-b/source_file.py").into(),
                },
                LanguageName::new("Python"),
                cx,
            )
        })
        .await;

    assert!(currently_active_toolchain.is_none());
    // Activate the discovered toolchain for project-b.
    let _ = project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.activate_toolchain(
                ProjectPath {
                    worktree_id,
                    path: root_path,
                },
                available_toolchains_for_b
                    .toolchains
                    .into_iter()
                    .next()
                    .unwrap(),
                cx,
            )
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // project-b's buffer should now be served by a second, separate server
    // instance for the newly-activated toolchain.
    let servers = project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |this, cx| {
            second_project_buffer.update(cx, |buffer, cx| {
                this.language_servers_for_local_buffer(buffer, cx)
                    .map(|(adapter, server)| (adapter.clone(), server.clone()))
                    .collect::<Vec<_>>()
            })
        })
    });
    cx.executor().run_until_parked();
    assert_eq!(servers.len(), 1);
    let (adapter, server) = servers.into_iter().next().unwrap();
    assert_eq!(adapter.name(), LanguageServerName::new_static("ty"));
    // There's a new language server in town.
    assert_eq!(server.server_id(), LanguageServerId(1));
}
806
807#[gpui::test]
808async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
809 init_test(cx);
810
811 let fs = FakeFs::new(cx.executor());
812 fs.insert_tree(
813 path!("/dir"),
814 json!({
815 "test.rs": "const A: i32 = 1;",
816 "test2.rs": "",
817 "Cargo.toml": "a = 1",
818 "package.json": "{\"a\": 1}",
819 }),
820 )
821 .await;
822
823 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
824 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
825
826 let mut fake_rust_servers = language_registry.register_fake_lsp(
827 "Rust",
828 FakeLspAdapter {
829 name: "the-rust-language-server",
830 capabilities: lsp::ServerCapabilities {
831 completion_provider: Some(lsp::CompletionOptions {
832 trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
833 ..Default::default()
834 }),
835 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
836 lsp::TextDocumentSyncOptions {
837 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
838 ..Default::default()
839 },
840 )),
841 ..Default::default()
842 },
843 ..Default::default()
844 },
845 );
846 let mut fake_json_servers = language_registry.register_fake_lsp(
847 "JSON",
848 FakeLspAdapter {
849 name: "the-json-language-server",
850 capabilities: lsp::ServerCapabilities {
851 completion_provider: Some(lsp::CompletionOptions {
852 trigger_characters: Some(vec![":".to_string()]),
853 ..Default::default()
854 }),
855 text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
856 lsp::TextDocumentSyncOptions {
857 save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
858 ..Default::default()
859 },
860 )),
861 ..Default::default()
862 },
863 ..Default::default()
864 },
865 );
866
867 // Open a buffer without an associated language server.
868 let (toml_buffer, _handle) = project
869 .update(cx, |project, cx| {
870 project.open_local_buffer_with_lsp(path!("/dir/Cargo.toml"), cx)
871 })
872 .await
873 .unwrap();
874
875 // Open a buffer with an associated language server before the language for it has been loaded.
876 let (rust_buffer, _handle2) = project
877 .update(cx, |project, cx| {
878 project.open_local_buffer_with_lsp(path!("/dir/test.rs"), cx)
879 })
880 .await
881 .unwrap();
882 rust_buffer.update(cx, |buffer, _| {
883 assert_eq!(buffer.language().map(|l| l.name()), None);
884 });
885
886 // Now we add the languages to the project, and ensure they get assigned to all
887 // the relevant open buffers.
888 language_registry.add(json_lang());
889 language_registry.add(rust_lang());
890 cx.executor().run_until_parked();
891 rust_buffer.update(cx, |buffer, _| {
892 assert_eq!(buffer.language().map(|l| l.name()), Some("Rust".into()));
893 });
894
895 // A server is started up, and it is notified about Rust files.
896 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
897 assert_eq!(
898 fake_rust_server
899 .receive_notification::<lsp::notification::DidOpenTextDocument>()
900 .await
901 .text_document,
902 lsp::TextDocumentItem {
903 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
904 version: 0,
905 text: "const A: i32 = 1;".to_string(),
906 language_id: "rust".to_string(),
907 }
908 );
909
910 // The buffer is configured based on the language server's capabilities.
911 rust_buffer.update(cx, |buffer, _| {
912 assert_eq!(
913 buffer
914 .completion_triggers()
915 .iter()
916 .cloned()
917 .collect::<Vec<_>>(),
918 &[".".to_string(), "::".to_string()]
919 );
920 });
921 toml_buffer.update(cx, |buffer, _| {
922 assert!(buffer.completion_triggers().is_empty());
923 });
924
925 // Edit a buffer. The changes are reported to the language server.
926 rust_buffer.update(cx, |buffer, cx| buffer.edit([(16..16, "2")], None, cx));
927 assert_eq!(
928 fake_rust_server
929 .receive_notification::<lsp::notification::DidChangeTextDocument>()
930 .await
931 .text_document,
932 lsp::VersionedTextDocumentIdentifier::new(
933 lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
934 1
935 )
936 );
937
938 // Open a third buffer with a different associated language server.
939 let (json_buffer, _json_handle) = project
940 .update(cx, |project, cx| {
941 project.open_local_buffer_with_lsp(path!("/dir/package.json"), cx)
942 })
943 .await
944 .unwrap();
945
946 // A json language server is started up and is only notified about the json buffer.
947 let mut fake_json_server = fake_json_servers.next().await.unwrap();
948 assert_eq!(
949 fake_json_server
950 .receive_notification::<lsp::notification::DidOpenTextDocument>()
951 .await
952 .text_document,
953 lsp::TextDocumentItem {
954 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
955 version: 0,
956 text: "{\"a\": 1}".to_string(),
957 language_id: "json".to_string(),
958 }
959 );
960
961 // This buffer is configured based on the second language server's
962 // capabilities.
963 json_buffer.update(cx, |buffer, _| {
964 assert_eq!(
965 buffer
966 .completion_triggers()
967 .iter()
968 .cloned()
969 .collect::<Vec<_>>(),
970 &[":".to_string()]
971 );
972 });
973
974 // When opening another buffer whose language server is already running,
975 // it is also configured based on the existing language server's capabilities.
976 let (rust_buffer2, _handle4) = project
977 .update(cx, |project, cx| {
978 project.open_local_buffer_with_lsp(path!("/dir/test2.rs"), cx)
979 })
980 .await
981 .unwrap();
982 rust_buffer2.update(cx, |buffer, _| {
983 assert_eq!(
984 buffer
985 .completion_triggers()
986 .iter()
987 .cloned()
988 .collect::<Vec<_>>(),
989 &[".".to_string(), "::".to_string()]
990 );
991 });
992
993 // Changes are reported only to servers matching the buffer's language.
994 toml_buffer.update(cx, |buffer, cx| buffer.edit([(5..5, "23")], None, cx));
995 rust_buffer2.update(cx, |buffer, cx| {
996 buffer.edit([(0..0, "let x = 1;")], None, cx)
997 });
998 assert_eq!(
999 fake_rust_server
1000 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1001 .await
1002 .text_document,
1003 lsp::VersionedTextDocumentIdentifier::new(
1004 lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap(),
1005 1
1006 )
1007 );
1008
1009 // Save notifications are reported to all servers.
1010 project
1011 .update(cx, |project, cx| project.save_buffer(toml_buffer, cx))
1012 .await
1013 .unwrap();
1014 assert_eq!(
1015 fake_rust_server
1016 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1017 .await
1018 .text_document,
1019 lsp::TextDocumentIdentifier::new(
1020 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1021 )
1022 );
1023 assert_eq!(
1024 fake_json_server
1025 .receive_notification::<lsp::notification::DidSaveTextDocument>()
1026 .await
1027 .text_document,
1028 lsp::TextDocumentIdentifier::new(
1029 lsp::Uri::from_file_path(path!("/dir/Cargo.toml")).unwrap()
1030 )
1031 );
1032
1033 // Renames are reported only to servers matching the buffer's language.
1034 fs.rename(
1035 Path::new(path!("/dir/test2.rs")),
1036 Path::new(path!("/dir/test3.rs")),
1037 Default::default(),
1038 )
1039 .await
1040 .unwrap();
1041 assert_eq!(
1042 fake_rust_server
1043 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1044 .await
1045 .text_document,
1046 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test2.rs")).unwrap()),
1047 );
1048 assert_eq!(
1049 fake_rust_server
1050 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1051 .await
1052 .text_document,
1053 lsp::TextDocumentItem {
1054 uri: lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap(),
1055 version: 0,
1056 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1057 language_id: "rust".to_string(),
1058 },
1059 );
1060
1061 rust_buffer2.update(cx, |buffer, cx| {
1062 buffer.update_diagnostics(
1063 LanguageServerId(0),
1064 DiagnosticSet::from_sorted_entries(
1065 vec![DiagnosticEntry {
1066 diagnostic: Default::default(),
1067 range: Anchor::MIN..Anchor::MAX,
1068 }],
1069 &buffer.snapshot(),
1070 ),
1071 cx,
1072 );
1073 assert_eq!(
1074 buffer
1075 .snapshot()
1076 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1077 .count(),
1078 1
1079 );
1080 });
1081
1082 // When the rename changes the extension of the file, the buffer gets closed on the old
1083 // language server and gets opened on the new one.
1084 fs.rename(
1085 Path::new(path!("/dir/test3.rs")),
1086 Path::new(path!("/dir/test3.json")),
1087 Default::default(),
1088 )
1089 .await
1090 .unwrap();
1091 assert_eq!(
1092 fake_rust_server
1093 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1094 .await
1095 .text_document,
1096 lsp::TextDocumentIdentifier::new(lsp::Uri::from_file_path(path!("/dir/test3.rs")).unwrap()),
1097 );
1098 assert_eq!(
1099 fake_json_server
1100 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1101 .await
1102 .text_document,
1103 lsp::TextDocumentItem {
1104 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1105 version: 0,
1106 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1107 language_id: "json".to_string(),
1108 },
1109 );
1110
1111 // We clear the diagnostics, since the language has changed.
1112 rust_buffer2.update(cx, |buffer, _| {
1113 assert_eq!(
1114 buffer
1115 .snapshot()
1116 .diagnostics_in_range::<_, usize>(0..buffer.len(), false)
1117 .count(),
1118 0
1119 );
1120 });
1121
1122 // The renamed file's version resets after changing language server.
1123 rust_buffer2.update(cx, |buffer, cx| buffer.edit([(0..0, "// ")], None, cx));
1124 assert_eq!(
1125 fake_json_server
1126 .receive_notification::<lsp::notification::DidChangeTextDocument>()
1127 .await
1128 .text_document,
1129 lsp::VersionedTextDocumentIdentifier::new(
1130 lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1131 1
1132 )
1133 );
1134
1135 // Restart language servers
1136 project.update(cx, |project, cx| {
1137 project.restart_language_servers_for_buffers(
1138 vec![rust_buffer.clone(), json_buffer.clone()],
1139 HashSet::default(),
1140 cx,
1141 );
1142 });
1143
1144 let mut rust_shutdown_requests = fake_rust_server
1145 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1146 let mut json_shutdown_requests = fake_json_server
1147 .set_request_handler::<lsp::request::Shutdown, _, _>(|_, _| future::ready(Ok(())));
1148 futures::join!(rust_shutdown_requests.next(), json_shutdown_requests.next());
1149
1150 let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
1151 let mut fake_json_server = fake_json_servers.next().await.unwrap();
1152
1153 // Ensure rust document is reopened in new rust language server
1154 assert_eq!(
1155 fake_rust_server
1156 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1157 .await
1158 .text_document,
1159 lsp::TextDocumentItem {
1160 uri: lsp::Uri::from_file_path(path!("/dir/test.rs")).unwrap(),
1161 version: 0,
1162 text: rust_buffer.update(cx, |buffer, _| buffer.text()),
1163 language_id: "rust".to_string(),
1164 }
1165 );
1166
1167 // Ensure json documents are reopened in new json language server
1168 assert_set_eq!(
1169 [
1170 fake_json_server
1171 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1172 .await
1173 .text_document,
1174 fake_json_server
1175 .receive_notification::<lsp::notification::DidOpenTextDocument>()
1176 .await
1177 .text_document,
1178 ],
1179 [
1180 lsp::TextDocumentItem {
1181 uri: lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1182 version: 0,
1183 text: json_buffer.update(cx, |buffer, _| buffer.text()),
1184 language_id: "json".to_string(),
1185 },
1186 lsp::TextDocumentItem {
1187 uri: lsp::Uri::from_file_path(path!("/dir/test3.json")).unwrap(),
1188 version: 0,
1189 text: rust_buffer2.update(cx, |buffer, _| buffer.text()),
1190 language_id: "json".to_string(),
1191 }
1192 ]
1193 );
1194
1195 // Close notifications are reported only to servers matching the buffer's language.
1196 cx.update(|_| drop(_json_handle));
1197 let close_message = lsp::DidCloseTextDocumentParams {
1198 text_document: lsp::TextDocumentIdentifier::new(
1199 lsp::Uri::from_file_path(path!("/dir/package.json")).unwrap(),
1200 ),
1201 };
1202 assert_eq!(
1203 fake_json_server
1204 .receive_notification::<lsp::notification::DidCloseTextDocument>()
1205 .await,
1206 close_message,
1207 );
1208}
1209
1210#[gpui::test]
1211async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppContext) {
1212 init_test(cx);
1213
1214 let fs = FakeFs::new(cx.executor());
1215 fs.insert_tree(
1216 path!("/the-root"),
1217 json!({
1218 ".gitignore": "target\n",
1219 "Cargo.lock": "",
1220 "src": {
1221 "a.rs": "",
1222 "b.rs": "",
1223 },
1224 "target": {
1225 "x": {
1226 "out": {
1227 "x.rs": ""
1228 }
1229 },
1230 "y": {
1231 "out": {
1232 "y.rs": "",
1233 }
1234 },
1235 "z": {
1236 "out": {
1237 "z.rs": ""
1238 }
1239 }
1240 }
1241 }),
1242 )
1243 .await;
1244 fs.insert_tree(
1245 path!("/the-registry"),
1246 json!({
1247 "dep1": {
1248 "src": {
1249 "dep1.rs": "",
1250 }
1251 },
1252 "dep2": {
1253 "src": {
1254 "dep2.rs": "",
1255 }
1256 },
1257 }),
1258 )
1259 .await;
1260 fs.insert_tree(
1261 path!("/the/stdlib"),
1262 json!({
1263 "LICENSE": "",
1264 "src": {
1265 "string.rs": "",
1266 }
1267 }),
1268 )
1269 .await;
1270
1271 let project = Project::test(fs.clone(), [path!("/the-root").as_ref()], cx).await;
1272 let (language_registry, lsp_store) = project.read_with(cx, |project, _| {
1273 (project.languages().clone(), project.lsp_store())
1274 });
1275 language_registry.add(rust_lang());
1276 let mut fake_servers = language_registry.register_fake_lsp(
1277 "Rust",
1278 FakeLspAdapter {
1279 name: "the-language-server",
1280 ..Default::default()
1281 },
1282 );
1283
1284 cx.executor().run_until_parked();
1285
1286 // Start the language server by opening a buffer with a compatible file extension.
1287 project
1288 .update(cx, |project, cx| {
1289 project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
1290 })
1291 .await
1292 .unwrap();
1293
1294 // Initially, we don't load ignored files because the language server has not explicitly asked us to watch them.
1295 project.update(cx, |project, cx| {
1296 let worktree = project.worktrees(cx).next().unwrap();
1297 assert_eq!(
1298 worktree
1299 .read(cx)
1300 .snapshot()
1301 .entries(true, 0)
1302 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1303 .collect::<Vec<_>>(),
1304 &[
1305 ("", false),
1306 (".gitignore", false),
1307 ("Cargo.lock", false),
1308 ("src", false),
1309 ("src/a.rs", false),
1310 ("src/b.rs", false),
1311 ("target", true),
1312 ]
1313 );
1314 });
1315
1316 let prev_read_dir_count = fs.read_dir_call_count();
1317
1318 let fake_server = fake_servers.next().await.unwrap();
1319 let server_id = lsp_store.read_with(cx, |lsp_store, _| {
1320 let (id, _) = lsp_store.language_server_statuses().next().unwrap();
1321 id
1322 });
1323
1324 // Simulate jumping to a definition in a dependency outside of the worktree.
1325 let _out_of_worktree_buffer = project
1326 .update(cx, |project, cx| {
1327 project.open_local_buffer_via_lsp(
1328 lsp::Uri::from_file_path(path!("/the-registry/dep1/src/dep1.rs")).unwrap(),
1329 server_id,
1330 cx,
1331 )
1332 })
1333 .await
1334 .unwrap();
1335
1336 // Keep track of the FS events reported to the language server.
1337 let file_changes = Arc::new(Mutex::new(Vec::new()));
1338 fake_server
1339 .request::<lsp::request::RegisterCapability>(lsp::RegistrationParams {
1340 registrations: vec![lsp::Registration {
1341 id: Default::default(),
1342 method: "workspace/didChangeWatchedFiles".to_string(),
1343 register_options: serde_json::to_value(
1344 lsp::DidChangeWatchedFilesRegistrationOptions {
1345 watchers: vec![
1346 lsp::FileSystemWatcher {
1347 glob_pattern: lsp::GlobPattern::String(
1348 path!("/the-root/Cargo.toml").to_string(),
1349 ),
1350 kind: None,
1351 },
1352 lsp::FileSystemWatcher {
1353 glob_pattern: lsp::GlobPattern::String(
1354 path!("/the-root/src/*.{rs,c}").to_string(),
1355 ),
1356 kind: None,
1357 },
1358 lsp::FileSystemWatcher {
1359 glob_pattern: lsp::GlobPattern::String(
1360 path!("/the-root/target/y/**/*.rs").to_string(),
1361 ),
1362 kind: None,
1363 },
1364 lsp::FileSystemWatcher {
1365 glob_pattern: lsp::GlobPattern::String(
1366 path!("/the/stdlib/src/**/*.rs").to_string(),
1367 ),
1368 kind: None,
1369 },
1370 lsp::FileSystemWatcher {
1371 glob_pattern: lsp::GlobPattern::String(
1372 path!("**/Cargo.lock").to_string(),
1373 ),
1374 kind: None,
1375 },
1376 ],
1377 },
1378 )
1379 .ok(),
1380 }],
1381 })
1382 .await
1383 .into_response()
1384 .unwrap();
1385 fake_server.handle_notification::<lsp::notification::DidChangeWatchedFiles, _>({
1386 let file_changes = file_changes.clone();
1387 move |params, _| {
1388 let mut file_changes = file_changes.lock();
1389 file_changes.extend(params.changes);
1390 file_changes.sort_by(|a, b| a.uri.cmp(&b.uri));
1391 }
1392 });
1393
1394 cx.executor().run_until_parked();
1395 assert_eq!(mem::take(&mut *file_changes.lock()), &[]);
1396 assert_eq!(fs.read_dir_call_count() - prev_read_dir_count, 4);
1397
1398 let mut new_watched_paths = fs.watched_paths();
1399 new_watched_paths.retain(|path| {
1400 !path.starts_with(config_dir()) && !path.starts_with(global_gitignore_path().unwrap())
1401 });
1402 assert_eq!(
1403 &new_watched_paths,
1404 &[
1405 Path::new(path!("/the-root")),
1406 Path::new(path!("/the-registry/dep1/src/dep1.rs")),
1407 Path::new(path!("/the/stdlib/src"))
1408 ]
1409 );
1410
1411 // Now the language server has asked us to watch an ignored directory path,
1412 // so we recursively load it.
1413 project.update(cx, |project, cx| {
1414 let worktree = project.visible_worktrees(cx).next().unwrap();
1415 assert_eq!(
1416 worktree
1417 .read(cx)
1418 .snapshot()
1419 .entries(true, 0)
1420 .map(|entry| (entry.path.as_unix_str(), entry.is_ignored))
1421 .collect::<Vec<_>>(),
1422 &[
1423 ("", false),
1424 (".gitignore", false),
1425 ("Cargo.lock", false),
1426 ("src", false),
1427 ("src/a.rs", false),
1428 ("src/b.rs", false),
1429 ("target", true),
1430 ("target/x", true),
1431 ("target/y", true),
1432 ("target/y/out", true),
1433 ("target/y/out/y.rs", true),
1434 ("target/z", true),
1435 ]
1436 );
1437 });
1438
1439 // Perform some file system mutations, two of which match the watched patterns,
1440 // and one of which does not.
1441 fs.create_file(path!("/the-root/src/c.rs").as_ref(), Default::default())
1442 .await
1443 .unwrap();
1444 fs.create_file(path!("/the-root/src/d.txt").as_ref(), Default::default())
1445 .await
1446 .unwrap();
1447 fs.remove_file(path!("/the-root/src/b.rs").as_ref(), Default::default())
1448 .await
1449 .unwrap();
1450 fs.create_file(
1451 path!("/the-root/target/x/out/x2.rs").as_ref(),
1452 Default::default(),
1453 )
1454 .await
1455 .unwrap();
1456 fs.create_file(
1457 path!("/the-root/target/y/out/y2.rs").as_ref(),
1458 Default::default(),
1459 )
1460 .await
1461 .unwrap();
1462 fs.save(
1463 path!("/the-root/Cargo.lock").as_ref(),
1464 &Rope::default(),
1465 Default::default(),
1466 )
1467 .await
1468 .unwrap();
1469 fs.save(
1470 path!("/the-stdlib/LICENSE").as_ref(),
1471 &Rope::default(),
1472 Default::default(),
1473 )
1474 .await
1475 .unwrap();
1476 fs.save(
1477 path!("/the/stdlib/src/string.rs").as_ref(),
1478 &Rope::default(),
1479 Default::default(),
1480 )
1481 .await
1482 .unwrap();
1483
1484 // The language server receives events for the FS mutations that match its watch patterns.
1485 cx.executor().run_until_parked();
1486 assert_eq!(
1487 &*file_changes.lock(),
1488 &[
1489 lsp::FileEvent {
1490 uri: lsp::Uri::from_file_path(path!("/the-root/Cargo.lock")).unwrap(),
1491 typ: lsp::FileChangeType::CHANGED,
1492 },
1493 lsp::FileEvent {
1494 uri: lsp::Uri::from_file_path(path!("/the-root/src/b.rs")).unwrap(),
1495 typ: lsp::FileChangeType::DELETED,
1496 },
1497 lsp::FileEvent {
1498 uri: lsp::Uri::from_file_path(path!("/the-root/src/c.rs")).unwrap(),
1499 typ: lsp::FileChangeType::CREATED,
1500 },
1501 lsp::FileEvent {
1502 uri: lsp::Uri::from_file_path(path!("/the-root/target/y/out/y2.rs")).unwrap(),
1503 typ: lsp::FileChangeType::CREATED,
1504 },
1505 lsp::FileEvent {
1506 uri: lsp::Uri::from_file_path(path!("/the/stdlib/src/string.rs")).unwrap(),
1507 typ: lsp::FileChangeType::CHANGED,
1508 },
1509 ]
1510 );
1511}
1512
/// Verifies that diagnostics pushed for two single-file worktrees land on the
/// right buffers: each `PublishDiagnosticsParams` URI is routed to the
/// matching buffer, with the severity preserved in the rendered chunks.
#[gpui::test]
async fn test_single_file_worktrees_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "let a = 1;",
            "b.rs": "let b = 2;"
        }),
    )
    .await;

    // Open the project with two single-file worktree roots rather than a
    // directory root.
    let project = Project::test(
        fs,
        [path!("/dir/a.rs").as_ref(), path!("/dir/b.rs").as_ref()],
        cx,
    )
    .await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let buffer_a = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_b = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    // Push one ERROR diagnostic for a.rs and one WARNING for b.rs, both
    // covering the single-character variable name (columns 4..5).
    lsp_store.update(cx, |lsp_store, cx| {
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(lsp::DiagnosticSeverity::ERROR),
                        message: "error 1".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostics(
                LanguageServerId(0),
                lsp::PublishDiagnosticsParams {
                    uri: Uri::from_file_path(path!("/dir/b.rs")).unwrap(),
                    version: None,
                    diagnostics: vec![lsp::Diagnostic {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
                        severity: Some(DiagnosticSeverity::WARNING),
                        message: "error 2".to_string(),
                        ..Default::default()
                    }],
                },
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
            .unwrap();
    });

    // a.rs shows the ERROR on its variable name.
    buffer_a.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("a", Some(DiagnosticSeverity::ERROR)),
                (" = 1;", None),
            ]
        );
    });
    // b.rs shows the WARNING on its variable name.
    buffer_b.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let ", None),
                ("b", Some(DiagnosticSeverity::WARNING)),
                (" = 2;", None),
            ]
        );
    });
}
1618
1619#[gpui::test]
1620async fn test_omitted_diagnostics(cx: &mut gpui::TestAppContext) {
1621 init_test(cx);
1622
1623 let fs = FakeFs::new(cx.executor());
1624 fs.insert_tree(
1625 path!("/root"),
1626 json!({
1627 "dir": {
1628 ".git": {
1629 "HEAD": "ref: refs/heads/main",
1630 },
1631 ".gitignore": "b.rs",
1632 "a.rs": "let a = 1;",
1633 "b.rs": "let b = 2;",
1634 },
1635 "other.rs": "let b = c;"
1636 }),
1637 )
1638 .await;
1639
1640 let project = Project::test(fs, [path!("/root/dir").as_ref()], cx).await;
1641 let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
1642 let (worktree, _) = project
1643 .update(cx, |project, cx| {
1644 project.find_or_create_worktree(path!("/root/dir"), true, cx)
1645 })
1646 .await
1647 .unwrap();
1648 let main_worktree_id = worktree.read_with(cx, |tree, _| tree.id());
1649
1650 let (worktree, _) = project
1651 .update(cx, |project, cx| {
1652 project.find_or_create_worktree(path!("/root/other.rs"), false, cx)
1653 })
1654 .await
1655 .unwrap();
1656 let other_worktree_id = worktree.update(cx, |tree, _| tree.id());
1657
1658 let server_id = LanguageServerId(0);
1659 lsp_store.update(cx, |lsp_store, cx| {
1660 lsp_store
1661 .update_diagnostics(
1662 server_id,
1663 lsp::PublishDiagnosticsParams {
1664 uri: Uri::from_file_path(path!("/root/dir/b.rs")).unwrap(),
1665 version: None,
1666 diagnostics: vec![lsp::Diagnostic {
1667 range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 5)),
1668 severity: Some(lsp::DiagnosticSeverity::ERROR),
1669 message: "unused variable 'b'".to_string(),
1670 ..Default::default()
1671 }],
1672 },
1673 None,
1674 DiagnosticSourceKind::Pushed,
1675 &[],
1676 cx,
1677 )
1678 .unwrap();
1679 lsp_store
1680 .update_diagnostics(
1681 server_id,
1682 lsp::PublishDiagnosticsParams {
1683 uri: Uri::from_file_path(path!("/root/other.rs")).unwrap(),
1684 version: None,
1685 diagnostics: vec![lsp::Diagnostic {
1686 range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 9)),
1687 severity: Some(lsp::DiagnosticSeverity::ERROR),
1688 message: "unknown variable 'c'".to_string(),
1689 ..Default::default()
1690 }],
1691 },
1692 None,
1693 DiagnosticSourceKind::Pushed,
1694 &[],
1695 cx,
1696 )
1697 .unwrap();
1698 });
1699
1700 let main_ignored_buffer = project
1701 .update(cx, |project, cx| {
1702 project.open_buffer((main_worktree_id, rel_path("b.rs")), cx)
1703 })
1704 .await
1705 .unwrap();
1706 main_ignored_buffer.update(cx, |buffer, _| {
1707 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1708 assert_eq!(
1709 chunks
1710 .iter()
1711 .map(|(s, d)| (s.as_str(), *d))
1712 .collect::<Vec<_>>(),
1713 &[
1714 ("let ", None),
1715 ("b", Some(DiagnosticSeverity::ERROR)),
1716 (" = 2;", None),
1717 ],
1718 "Gigitnored buffers should still get in-buffer diagnostics",
1719 );
1720 });
1721 let other_buffer = project
1722 .update(cx, |project, cx| {
1723 project.open_buffer((other_worktree_id, rel_path("")), cx)
1724 })
1725 .await
1726 .unwrap();
1727 other_buffer.update(cx, |buffer, _| {
1728 let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
1729 assert_eq!(
1730 chunks
1731 .iter()
1732 .map(|(s, d)| (s.as_str(), *d))
1733 .collect::<Vec<_>>(),
1734 &[
1735 ("let b = ", None),
1736 ("c", Some(DiagnosticSeverity::ERROR)),
1737 (";", None),
1738 ],
1739 "Buffers from hidden projects should still get in-buffer diagnostics"
1740 );
1741 });
1742
1743 project.update(cx, |project, cx| {
1744 assert_eq!(project.diagnostic_summaries(false, cx).next(), None);
1745 assert_eq!(
1746 project.diagnostic_summaries(true, cx).collect::<Vec<_>>(),
1747 vec![(
1748 ProjectPath {
1749 worktree_id: main_worktree_id,
1750 path: rel_path("b.rs").into(),
1751 },
1752 server_id,
1753 DiagnosticSummary {
1754 error_count: 1,
1755 warning_count: 0,
1756 }
1757 )]
1758 );
1759 assert_eq!(project.diagnostic_summary(false, cx).error_count, 0);
1760 assert_eq!(project.diagnostic_summary(true, cx).error_count, 1);
1761 });
1762}
1763
/// Exercises the disk-based-diagnostics progress lifecycle: progress reported
/// under the adapter's designated token must emit
/// `DiskBasedDiagnosticsStarted`/`Finished` project events bracketing the
/// `DiagnosticsUpdated` events, and publishing empty diagnostics twice must
/// only produce a single update event.
#[gpui::test]
async fn test_disk_based_diagnostics_progress(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "fn a() { A }",
            "b.rs": "const y: i32 = 1",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // The adapter designates `progress_token` as its disk-based diagnostics token.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    // Cause worktree to start the fake language server
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let mut events = cx.events(&project);

    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(0),
            fake_server.server.name(),
            Some(worktree_id)
        ),
    );

    // Beginning progress under the designated token marks disk-based
    // diagnostics as "in progress".
    fake_server
        .start_progress(format!("{}/0", progress_token))
        .await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(0),
        }
    );

    // Publishing diagnostics while progress is active produces an update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "undefined variable 'A'".to_string(),
            ..Default::default()
        }],
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // Ending the progress finishes the disk-based diagnostics cycle.
    fake_server.end_progress(format!("{}/0", progress_token));
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(0)
        }
    );

    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    // The published diagnostic is visible on the buffer with its metadata.
    buffer.update(cx, |buffer, _| {
        let snapshot = buffer.snapshot();
        let diagnostics = snapshot
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>();
        assert_eq!(
            diagnostics,
            &[DiagnosticEntryRef {
                range: Point::new(0, 9)..Point::new(0, 10),
                diagnostic: &Diagnostic {
                    severity: lsp::DiagnosticSeverity::ERROR,
                    message: "undefined variable 'A'".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }]
        )
    });

    // Ensure publishing empty diagnostics twice only results in one update event.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiagnosticsUpdated {
            language_server_id: LanguageServerId(0),
            paths: vec![(worktree_id, rel_path("a.rs")).into()],
        }
    );

    // The second identical (empty) publish must be a no-op: no further events.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: Default::default(),
    });
    cx.executor().run_until_parked();
    assert_eq!(futures::poll!(events.next()), Poll::Pending);
}
1903
/// Verifies that restarting a language server while its disk-based
/// diagnostics progress is still open does not wedge the diagnostics state:
/// the new server's progress cycle completes normally even though the old
/// server never ended its own.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_running(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..FakeLspAdapter::default()
        },
    );

    let worktree_id = project.update(cx, |p, cx| p.worktrees(cx).next().unwrap().read(cx).id());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let buffer_id = buffer.read_with(cx, |buffer, _| buffer.remote_id());
    // Simulate diagnostics starting to update.
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.start_progress(progress_token).await;

    // Restart the server before the diagnostics finish updating.
    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer], HashSet::default(), cx);
    });
    let mut events = cx.events(&project);

    // Simulate the newly started server sending more diagnostics.
    // The restart removes server 0 and adds server 1; assert the exact
    // event sequence.
    let fake_server = fake_servers.next().await.unwrap();
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerRemoved(LanguageServerId(0))
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerAdded(
            LanguageServerId(1),
            fake_server.server.name(),
            Some(worktree_id)
        )
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::RefreshInlayHints(fake_server.server.server_id())
    );
    fake_server.start_progress(progress_token).await;
    assert_eq!(
        events.next().await.unwrap(),
        Event::LanguageServerBufferRegistered {
            server_id: LanguageServerId(1),
            buffer_id,
            buffer_abs_path: PathBuf::from(path!("/dir/a.rs")),
            name: Some(fake_server.server.name())
        }
    );
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsStarted {
            language_server_id: LanguageServerId(1)
        }
    );
    // Only the new server is considered to be running disk-based diagnostics.
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [LanguageServerId(1)]
        );
    });

    // All diagnostics are considered done, despite the old server's diagnostic
    // task never completing.
    fake_server.end_progress(progress_token);
    assert_eq!(
        events.next().await.unwrap(),
        Event::DiskBasedDiagnosticsFinished {
            language_server_id: LanguageServerId(1)
        }
    );
    project.update(cx, |project, cx| {
        assert_eq!(
            project
                .language_servers_running_disk_based_diagnostics(cx)
                .collect::<Vec<_>>(),
            [] as [language::LanguageServerId; 0]
        );
    });
}
2007
/// Verifies that diagnostics published by a language server are cleared from
/// both the buffer and the project-wide summary when that server is restarted.
#[gpui::test]
async fn test_restarting_server_with_diagnostics_published(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "x" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Publish diagnostics
    let fake_server = fake_servers.next().await.unwrap();
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: None,
        diagnostics: vec![lsp::Diagnostic {
            range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
            severity: Some(lsp::DiagnosticSeverity::ERROR),
            message: "the message".to_string(),
            ..Default::default()
        }],
    });

    // The diagnostic is visible on the buffer and counted in the summary.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            ["the message".to_string()]
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 1,
                warning_count: 0,
            }
        );
    });

    project.update(cx, |project, cx| {
        project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
    });

    // The diagnostics are cleared.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, usize>(0..1, false)
                .map(|entry| entry.diagnostic.message.clone())
                .collect::<Vec<_>>(),
            Vec::<String>::new(),
        );
    });
    project.update(cx, |project, cx| {
        assert_eq!(
            project.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 0,
                warning_count: 0,
            }
        );
    });
}
2088
2089#[gpui::test]
2090async fn test_restarted_server_reporting_invalid_buffer_version(cx: &mut gpui::TestAppContext) {
2091 init_test(cx);
2092
2093 let fs = FakeFs::new(cx.executor());
2094 fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;
2095
2096 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
2097 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
2098
2099 language_registry.add(rust_lang());
2100 let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());
2101
2102 let (buffer, _handle) = project
2103 .update(cx, |project, cx| {
2104 project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
2105 })
2106 .await
2107 .unwrap();
2108
2109 // Before restarting the server, report diagnostics with an unknown buffer version.
2110 let fake_server = fake_servers.next().await.unwrap();
2111 fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
2112 uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
2113 version: Some(10000),
2114 diagnostics: Vec::new(),
2115 });
2116 cx.executor().run_until_parked();
2117 project.update(cx, |project, cx| {
2118 project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx);
2119 });
2120
2121 let mut fake_server = fake_servers.next().await.unwrap();
2122 let notification = fake_server
2123 .receive_notification::<lsp::notification::DidOpenTextDocument>()
2124 .await
2125 .text_document;
2126 assert_eq!(notification.version, 0);
2127}
2128
/// Verifies that cancelling language-server work for a buffer sends a
/// `window/workDoneProgress/cancel` notification only for progress marked
/// `cancellable: true`, leaving non-cancellable work untouched.
#[gpui::test]
async fn test_cancel_language_server_work(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let progress_token = "the-progress-token";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "" })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-language-server",
            disk_based_diagnostics_sources: vec!["disk".into()],
            disk_based_diagnostics_progress_token: Some(progress_token.into()),
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate diagnostics starting to update.
    // Start two progress tokens: one non-cancellable, one cancellable.
    let mut fake_server = fake_servers.next().await.unwrap();
    fake_server
        .start_progress_with(
            "another-token",
            lsp::WorkDoneProgressBegin {
                cancellable: Some(false),
                ..Default::default()
            },
        )
        .await;
    fake_server
        .start_progress_with(
            progress_token,
            lsp::WorkDoneProgressBegin {
                cancellable: Some(true),
                ..Default::default()
            },
        )
        .await;
    cx.executor().run_until_parked();

    project.update(cx, |project, cx| {
        project.cancel_language_server_work_for_buffers([buffer.clone()], cx)
    });

    // Only the cancellable token should receive a cancel notification.
    let cancel_notification = fake_server
        .receive_notification::<lsp::notification::WorkDoneProgressCancel>()
        .await;
    assert_eq!(
        cancel_notification.token,
        NumberOrString::String(progress_token.into())
    );
}
2193
/// Verifies that toggling the per-language `enable_language_server` setting
/// stops and restarts only the affected language's server, leaving servers for
/// other languages running.
#[gpui::test]
async fn test_toggling_enable_language_server(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": "", "b.js": "" }))
        .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "rust-lsp",
            ..Default::default()
        },
    );
    let mut fake_js_servers = language_registry.register_fake_lsp(
        "JavaScript",
        FakeLspAdapter {
            name: "js-lsp",
            ..Default::default()
        },
    );
    language_registry.add(rust_lang());
    language_registry.add(js_lang());

    // Opening a buffer of each language starts both servers.
    let _rs_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();
    let _js_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.js"), cx)
        })
        .await
        .unwrap();

    let mut fake_rust_server_1 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_1
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );

    let mut fake_js_server = fake_js_servers.next().await.unwrap();
    assert_eq!(
        fake_js_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/b.js")
    );

    // Disable Rust language server, ensuring only that server gets stopped.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    fake_rust_server_1
        .receive_notification::<lsp::notification::Exit>()
        .await;

    // Enable Rust and disable JavaScript language servers, ensuring that the
    // former gets started again and that the latter stops.
    cx.update(|cx| {
        SettingsStore::update_global(cx, |settings, cx| {
            settings.update_user_settings(cx, |settings| {
                settings.languages_mut().insert(
                    "Rust".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(true),
                        ..Default::default()
                    },
                );
                settings.languages_mut().insert(
                    "JavaScript".into(),
                    LanguageSettingsContent {
                        enable_language_server: Some(false),
                        ..Default::default()
                    },
                );
            });
        })
    });
    // The re-enabled Rust server reopens the Rust buffer...
    let mut fake_rust_server_2 = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server_2
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document
            .uri
            .as_str(),
        uri!("file:///dir/a.rs")
    );
    // ...while the JavaScript server is told to exit.
    fake_js_server
        .receive_notification::<lsp::notification::Exit>()
        .await;
}
2311
// End-to-end check that diagnostics published by a language server are anchored
// into the buffer and kept consistent with edits:
// - diagnostics reported against an older document version are translated
//   forward through subsequent edits,
// - overlapping diagnostics are highlighted correctly,
// - diagnostics published in non-sorted order come back sorted by position.
#[gpui::test(iterations = 3)]
async fn test_transforming_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() { A }
        fn b() { BB }
        fn c() { CCC }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({ "a.rs": text })).await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    // Declaring "disk" as a disk-based diagnostics source makes the resulting
    // diagnostics carry `is_disk_based: true`.
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            disk_based_diagnostics_sources: vec!["disk".into()],
            ..Default::default()
        },
    );

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    let _handle = project.update(cx, |project, cx| {
        project.register_buffer_with_language_servers(&buffer, cx)
    });

    let mut fake_server = fake_servers.next().await.unwrap();
    let open_notification = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await;

    // Edit the buffer, moving the content down
    buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "\n\n")], None, cx));
    let change_notification_1 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(change_notification_1.text_document.version > open_notification.text_document.version);

    // Report some diagnostics for the initial version of the buffer
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 9), lsp::Position::new(2, 12)),
                severity: Some(DiagnosticSeverity::ERROR),
                source: Some("disk".to_string()),
                message: "undefined variable 'CCC'".to_string(),
                ..Default::default()
            },
        ],
    });

    // The diagnostics have moved down since they were created.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(3, 0)..Point::new(5, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 11),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 1,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                },
                DiagnosticEntry {
                    range: Point::new(4, 9)..Point::new(4, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'CCC'".to_string(),
                        is_disk_based: true,
                        group_id: 2,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, 0..buffer.len()),
            [
                ("\n\nfn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn b() { ".to_string(), None),
                ("BB".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CCC".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(3, 10)..Point::new(4, 11)),
            [
                ("B".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }\nfn c() { ".to_string(), None),
                ("CC".to_string(), Some(DiagnosticSeverity::ERROR)),
            ]
        );
    });

    // Ensure overlapping diagnostics are highlighted correctly.
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(open_notification.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 12)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "unreachable statement".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(Point::new(2, 0)..Point::new(3, 0), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 12),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "unreachable statement".to_string(),
                        is_disk_based: true,
                        group_id: 4,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(2, 9)..Point::new(2, 10),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 3,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 0)..Point::new(3, 0)),
            [
                ("fn a() { ".to_string(), None),
                ("A".to_string(), Some(DiagnosticSeverity::ERROR)),
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
        assert_eq!(
            chunks_with_diagnostics(buffer, Point::new(2, 10)..Point::new(3, 0)),
            [
                (" }".to_string(), Some(DiagnosticSeverity::WARNING)),
                ("\n".to_string(), None),
            ]
        );
    });

    // Keep editing the buffer and ensure disk-based diagnostics get translated according to the
    // changes since the last save.
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "    ")], None, cx);
        buffer.edit(
            [(Point::new(2, 8)..Point::new(2, 10), "(x: usize)")],
            None,
            cx,
        );
        buffer.edit([(Point::new(3, 10)..Point::new(3, 10), "xxx")], None, cx);
    });
    let change_notification_2 = fake_server
        .receive_notification::<lsp::notification::DidChangeTextDocument>()
        .await;
    assert!(
        change_notification_2.text_document.version > change_notification_1.text_document.version
    );

    // Handle out-of-order diagnostics
    fake_server.notify::<lsp::notification::PublishDiagnostics>(lsp::PublishDiagnosticsParams {
        uri: lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
        version: Some(change_notification_2.text_document.version),
        diagnostics: vec![
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 9), lsp::Position::new(1, 11)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "undefined variable 'BB'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "undefined variable 'A'".to_string(),
                source: Some("disk".to_string()),
                ..Default::default()
            },
        ],
    });

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        // Despite being published out of order, entries come back sorted.
        assert_eq!(
            buffer
                .snapshot()
                .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
                .collect::<Vec<_>>(),
            &[
                DiagnosticEntry {
                    range: Point::new(2, 21)..Point::new(2, 22),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::WARNING,
                        message: "undefined variable 'A'".to_string(),
                        is_disk_based: true,
                        group_id: 6,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    }
                },
                DiagnosticEntry {
                    range: Point::new(3, 9)..Point::new(3, 14),
                    diagnostic: Diagnostic {
                        source: Some("disk".into()),
                        severity: DiagnosticSeverity::ERROR,
                        message: "undefined variable 'BB'".to_string(),
                        is_disk_based: true,
                        group_id: 5,
                        is_primary: true,
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }
            ]
        );
    });
}
2603
// Checks how zero-width diagnostic ranges are rendered in chunked output: an
// empty range is extended forward to include the following character, and at
// end of line it is extended backward to include the preceding character.
#[gpui::test]
async fn test_empty_diagnostic_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = concat!(
        "let one = ;\n", //
        "let two =  \n",
        "let three = 3;\n",
    );

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": text })).await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| project.open_local_buffer("/dir/a.rs", cx))
        .await
        .unwrap();

    // Inject two empty-range diagnostics directly into the LSP store, bypassing
    // the notification path.
    project.update(cx, |project, cx| {
        project.lsp_store.update(cx, |lsp_store, cx| {
            lsp_store
                .update_diagnostic_entries(
                    LanguageServerId(0),
                    PathBuf::from("/dir/a.rs"),
                    None,
                    None,
                    vec![
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(0, 10))
                                ..Unclipped(PointUtf16::new(0, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 1".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                        DiagnosticEntry {
                            range: Unclipped(PointUtf16::new(1, 10))
                                ..Unclipped(PointUtf16::new(1, 10)),
                            diagnostic: Diagnostic {
                                severity: DiagnosticSeverity::ERROR,
                                message: "syntax error 2".to_string(),
                                source_kind: DiagnosticSourceKind::Pushed,
                                ..Diagnostic::default()
                            },
                        },
                    ],
                    cx,
                )
                .unwrap();
        })
    });

    // An empty range is extended forward to include the following character.
    // At the end of a line, an empty range is extended backward to include
    // the preceding character.
    buffer.update(cx, |buffer, _| {
        let chunks = chunks_with_diagnostics(buffer, 0..buffer.len());
        assert_eq!(
            chunks
                .iter()
                .map(|(s, d)| (s.as_str(), *d))
                .collect::<Vec<_>>(),
            &[
                ("let one = ", None),
                (";", Some(DiagnosticSeverity::ERROR)),
                ("\nlet two =", None),
                (" ", Some(DiagnosticSeverity::ERROR)),
                (" \nlet three = 3;\n", None)
            ]
        );
    });
}
2679
// Ensures diagnostics reported by distinct language servers for the same file
// are kept separately and both counted in the project diagnostic summary.
#[gpui::test]
async fn test_diagnostics_from_multiple_language_servers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({ "a.rs": "one two three" }))
        .await;

    let project = Project::test(fs, ["/dir".as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store.clone());

    lsp_store.update(cx, |lsp_store, cx| {
        // Same path and range, but reported by two different server ids.
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(0),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error a1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();
        lsp_store
            .update_diagnostic_entries(
                LanguageServerId(1),
                Path::new("/dir/a.rs").to_owned(),
                None,
                None,
                vec![DiagnosticEntry {
                    range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 3)),
                    diagnostic: Diagnostic {
                        severity: DiagnosticSeverity::ERROR,
                        is_primary: true,
                        message: "syntax error b1".to_string(),
                        source_kind: DiagnosticSourceKind::Pushed,
                        ..Diagnostic::default()
                    },
                }],
                cx,
            )
            .unwrap();

        // One error per server; the summary should count both.
        assert_eq!(
            lsp_store.diagnostic_summary(false, cx),
            DiagnosticSummary {
                error_count: 2,
                warning_count: 0,
            }
        );
    });
}
2740
// Verifies that LSP edits computed against an older document version are
// transformed through the buffer edits made since that version, so they still
// land in the intended locations when applied to the current buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_past_version(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        fn a() {
            f1();
        }
        fn b() {
            f2();
        }
        fn c() {
            f3();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Capture the document version at open time; the server's edits below will
    // be expressed against this (soon to be stale) version.
    let mut fake_server = fake_servers.next().await.unwrap();
    let lsp_document_version = fake_server
        .receive_notification::<lsp::notification::DidOpenTextDocument>()
        .await
        .text_document
        .version;

    // Simulate editing the buffer after the language server computes some edits.
    buffer.update(cx, |buffer, cx| {
        buffer.edit(
            [(
                Point::new(0, 0)..Point::new(0, 0),
                "// above first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(2, 0)..Point::new(2, 0),
                "    // inside first function\n",
            )],
            None,
            cx,
        );
        buffer.edit(
            [(
                Point::new(6, 4)..Point::new(6, 4),
                "// inside second function ",
            )],
            None,
            cx,
        );

        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f1();
                }
                fn b() {
                    // inside second function f2();
                }
                fn c() {
                    f3();
                }
            "
            .unindent()
        );
    });

    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                vec![
                    // replace body of first function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(3, 0)),
                        new_text: "
                            fn a() {
                                f10();
                            }
                        "
                        .unindent(),
                    },
                    // edit inside second function
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 6)),
                        new_text: "00".into(),
                    },
                    // edit inside third function via two distinct edits
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 5)),
                        new_text: "4000".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(7, 5), lsp::Position::new(7, 6)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                Some(lsp_document_version),
                cx,
            )
        })
        .await
        .unwrap();

    // Applying the transformed edits must produce the merged result: the user's
    // interleaved comments survive alongside the server's changes.
    buffer.update(cx, |buffer, cx| {
        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                // above first function
                fn a() {
                    // inside first function
                    f10();
                }
                fn b() {
                    // inside second function f200();
                }
                fn c() {
                    f4000();
                }
            "
            .unindent()
        );
    });
}
2895
// Verifies that a large rewrite-the-whole-file style edit set (as rust-analyzer
// produces for merge-imports) is minimized by `edits_from_lsp` down to the two
// small edits that actually change the buffer.
#[gpui::test]
async fn test_edits_from_lsp2_with_edits_on_adjacent_lines(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a small edit in the form of a very large diff.
    // Rust-analyzer does this when performing a merge-imports code action.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    // Replace the first use statement without editing the semicolon.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 8)),
                        new_text: "a::{b, c}".into(),
                    },
                    // Reinsert the remainder of the file between the semicolon and the final
                    // newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                    // Delete everything after the first newline of the file.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(7, 0)),
                        new_text: "".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // Only the genuinely-changed spans should survive minimization.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3006
// Verifies that an insertion following a replacement at the same position —
// which technically violates the LSP spec's edit-ordering rules — is still
// applied sensibly, with the insertion landing before the replaced text.
#[gpui::test]
async fn test_edits_from_lsp_with_replacement_followed_by_adjacent_insertion(
    cx: &mut gpui::TestAppContext,
) {
    init_test(cx);

    let text = "Path()";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us a pair of edits at the same location,
    // with an insertion following a replacement (which violates the LSP spec).
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 4)),
                        new_text: "Path".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
                        new_text: "from path import Path\n\n\n".into(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        buffer.edit(edits, None, cx);
        assert_eq!(buffer.text(), "from path import Path\n\n\nPath()")
    });
}
3062
// Verifies that malformed LSP edits — unordered, with inverted ranges and
// positions beyond the end of the document — are normalized (clipped and
// sorted) and minimized to the same result as well-formed edits would give.
#[gpui::test]
async fn test_invalid_edits_from_lsp2(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let text = "
        use a::b;
        use a::c;

        fn f() {
            b();
            c();
        }
    "
    .unindent();

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": text.clone(),
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/a.rs"), cx)
        })
        .await
        .unwrap();

    // Simulate the language server sending us edits in a non-ordered fashion,
    // with ranges sometimes being inverted or pointing to invalid locations.
    let edits = lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.as_local_mut().unwrap().edits_from_lsp(
                &buffer,
                [
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "\n\n".into(),
                    },
                    // Inverted range: end precedes start.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 8), lsp::Position::new(0, 4)),
                        new_text: "a::{b, c}".into(),
                    },
                    // End position far past the end of the document.
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(1, 0), lsp::Position::new(99, 0)),
                        new_text: "".into(),
                    },
                    lsp::TextEdit {
                        range: lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 9)),
                        new_text: "
                            fn f() {
                                b();
                                c();
                            }"
                        .unindent(),
                    },
                ],
                LanguageServerId(0),
                None,
                cx,
            )
        })
        .await
        .unwrap();

    buffer.update(cx, |buffer, cx| {
        let edits = edits
            .into_iter()
            .map(|(range, text)| {
                (
                    range.start.to_point(buffer)..range.end.to_point(buffer),
                    text,
                )
            })
            .collect::<Vec<_>>();

        // After normalization, the result matches the well-formed equivalent.
        assert_eq!(
            edits,
            [
                (Point::new(0, 4)..Point::new(0, 8), "a::{b, c}".into()),
                (Point::new(1, 0)..Point::new(2, 0), "".into())
            ]
        );

        for (range, new_text) in edits {
            buffer.edit([(range, new_text)], None, cx);
        }
        assert_eq!(
            buffer.text(),
            "
                use a::{b, c};

                fn f() {
                    b();
                    c();
                }
            "
            .unindent()
        );
    });
}
3169
3170fn chunks_with_diagnostics<T: ToOffset + ToPoint>(
3171 buffer: &Buffer,
3172 range: Range<T>,
3173) -> Vec<(String, Option<DiagnosticSeverity>)> {
3174 let mut chunks: Vec<(String, Option<DiagnosticSeverity>)> = Vec::new();
3175 for chunk in buffer.snapshot().chunks(range, true) {
3176 if chunks
3177 .last()
3178 .is_some_and(|prev_chunk| prev_chunk.1 == chunk.diagnostic_severity)
3179 {
3180 chunks.last_mut().unwrap().0.push_str(chunk.text);
3181 } else {
3182 chunks.push((chunk.text.to_string(), chunk.diagnostic_severity));
3183 }
3184 }
3185 chunks
3186}
3187
// Verifies go-to-definition across files: the target file is opened in a new
// *invisible* worktree (it was not part of the project roots), and that
// worktree is released once the last reference to the definition is dropped.
#[gpui::test(iterations = 10)]
async fn test_definition(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "const fn a() { A }",
            "b.rs": "const y: i32 = crate::a()",
        }),
    )
    .await;

    // Only b.rs is a project root; a.rs lives outside the project.
    let project = Project::test(fs, [path!("/dir/b.rs").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp("Rust", FakeLspAdapter::default());

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/b.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    fake_server.set_request_handler::<lsp::request::GotoDefinition, _, _>(|params, _| async move {
        let params = params.text_document_position_params;
        assert_eq!(
            params.text_document.uri.to_file_path().unwrap(),
            Path::new(path!("/dir/b.rs")),
        );
        assert_eq!(params.position, lsp::Position::new(0, 22));

        Ok(Some(lsp::GotoDefinitionResponse::Scalar(
            lsp::Location::new(
                lsp::Uri::from_file_path(path!("/dir/a.rs")).unwrap(),
                lsp::Range::new(lsp::Position::new(0, 9), lsp::Position::new(0, 10)),
            ),
        )))
    });
    let mut definitions = project
        .update(cx, |project, cx| project.definitions(&buffer, 22, cx))
        .await
        .unwrap()
        .unwrap();

    // Assert no new language server started
    cx.executor().run_until_parked();
    assert!(fake_servers.try_next().is_err());

    assert_eq!(definitions.len(), 1);
    let definition = definitions.pop().unwrap();
    cx.update(|cx| {
        let target_buffer = definition.target.buffer.read(cx);
        assert_eq!(
            target_buffer
                .file()
                .unwrap()
                .as_local()
                .unwrap()
                .abs_path(cx),
            Path::new(path!("/dir/a.rs")),
        );
        assert_eq!(definition.target.range.to_offset(target_buffer), 9..10);
        // a.rs got an invisible worktree; b.rs remains the visible one.
        assert_eq!(
            list_worktrees(&project, cx),
            [
                (path!("/dir/a.rs").as_ref(), false),
                (path!("/dir/b.rs").as_ref(), true)
            ],
        );

        drop(definition);
    });
    // Dropping the definition releases the invisible worktree.
    cx.update(|cx| {
        assert_eq!(
            list_worktrees(&project, cx),
            [(path!("/dir/b.rs").as_ref(), true)]
        );
    });

    // Helper: each worktree's absolute path paired with its visibility flag.
    fn list_worktrees<'a>(project: &'a Entity<Project>, cx: &'a App) -> Vec<(&'a Path, bool)> {
        project
            .read(cx)
            .worktrees(cx)
            .map(|worktree| {
                let worktree = worktree.read(cx);
                (
                    worktree.as_local().unwrap().abs_path().as_ref(),
                    worktree.is_visible(),
                )
            })
            .collect::<Vec<_>>()
    }
}
3286
// Verifies that when a completion item supplies `text_edit`, that edit's range
// and text take precedence over both `insert_text` and `label` when resolving
// the completion's replacement.
#[gpui::test]
async fn test_completions_with_text_edit(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // When text_edit exists, it takes precedence over insert_text and label
    let text = "let a = obj.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    // Respond with a single item carrying all three candidate texts; only the
    // text_edit's text/range should win.
    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "labelText".into(),
                    insert_text: Some("insertText".into()),
                    text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
                        range: lsp::Range::new(
                            lsp::Position::new(0, text.len() as u32 - 3),
                            lsp::Position::new(0, text.len() as u32),
                        ),
                        new_text: "textEditText".into(),
                    })),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;

    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

    assert_eq!(completions.len(), 1);
    assert_eq!(completions[0].new_text, "textEditText");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );
}
3369
// Completion items may omit a per-item `text_edit` and instead rely on the
// list-level default edit range (`CompletionList::item_defaults.edit_range`,
// LSP 3.17). This test verifies that the default range is applied both when
// the item supplies `text_edit_text` and when it supplies nothing at all.
#[gpui::test]
async fn test_completions_with_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();
    let text = "let a = obj.fqn";

    // Test 1: When text_edit is None but text_edit_text exists with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        // `.next().await` blocks until the fake server has served one
        // completion request, so the response below is guaranteed to have
        // been delivered before we await `completions`.
        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        // Default range covers the last 3 characters ("fqn").
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: Some("textEditText".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        // `text_edit_text` is preferred over the label when a default edit
        // range is in effect.
        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "textEditText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }

    // Test 2: When both text_edit and text_edit_text are None with default edit_range
    {
        buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
        let completions = project.update(cx, |project, cx| {
            project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
        });

        fake_server
            .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async {
                Ok(Some(lsp::CompletionResponse::List(lsp::CompletionList {
                    is_incomplete: false,
                    item_defaults: Some(lsp::CompletionListItemDefaults {
                        edit_range: Some(lsp::CompletionListItemDefaultsEditRange::Range(
                            lsp::Range::new(
                                lsp::Position::new(0, text.len() as u32 - 3),
                                lsp::Position::new(0, text.len() as u32),
                            ),
                        )),
                        ..Default::default()
                    }),
                    items: vec![lsp::CompletionItem {
                        label: "labelText".into(),
                        text_edit_text: None,
                        // `insert_text` must be ignored when a default edit
                        // range applies — the label wins (asserted below).
                        insert_text: Some("irrelevant".into()),
                        text_edit: None,
                        ..Default::default()
                    }],
                })))
            })
            .next()
            .await;

        let completions = completions
            .await
            .unwrap()
            .into_iter()
            .flat_map(|response| response.completions)
            .collect::<Vec<_>>();
        let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());

        assert_eq!(completions.len(), 1);
        assert_eq!(completions[0].new_text, "labelText");
        assert_eq!(
            completions[0].replace_range.to_offset(&snapshot),
            text.len() - 3..text.len()
        );
    }
}
3506
// When neither the item nor the list defaults provide an edit range, the
// editor must infer the replace range from the word (or partial token)
// preceding the completion position.
#[gpui::test]
async fn test_completions_without_edit_ranges(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    // NOTE(review): the trigger character here is ":" while the
                    // buffer text completes after "." — presumably fine because
                    // the test requests completions explicitly; confirm intent.
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Test 1: When text_edit is None but insert_text exists (no edit_range in defaults)
    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    insert_text: Some("fullyQualifiedName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // `insert_text` is used, and the range spans the trailing "fqn" token.
    assert_eq!(completions[0].new_text, "fullyQualifiedName");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 3..text.len()
    );

    // Test 2: When both text_edit and insert_text are None (no edit_range in defaults)
    let text = "let a = \"atoms/cmp\"";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        // Completion position is just before the closing quote.
        project.completions(&buffer, text.len() - 1, DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "component".into(),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
    assert_eq!(completions.len(), 1);
    // With no insert_text, the label itself is the new text, and the range
    // covers the "cmp" token inside the string literal.
    assert_eq!(completions[0].new_text, "component");
    assert_eq!(
        completions[0].replace_range.to_offset(&snapshot),
        text.len() - 4..text.len() - 1
    );
}
3612
// Completion text containing bare "\r" or "\r\n" line endings must be
// normalized to "\n" before being applied to the buffer.
#[gpui::test]
async fn test_completions_with_carriage_returns(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![":".to_string()]),
                    ..Default::default()
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    let text = "let a = b.fqn";
    buffer.update(cx, |buffer, cx| buffer.set_text(text, cx));
    let completions = project.update(cx, |project, cx| {
        project.completions(&buffer, text.len(), DEFAULT_COMPLETION_CONTEXT, cx)
    });

    fake_server
        .set_request_handler::<lsp::request::Completion, _, _>(|_, _| async move {
            Ok(Some(lsp::CompletionResponse::Array(vec![
                lsp::CompletionItem {
                    label: "fullyQualifiedName?".into(),
                    // Deliberately mixes a lone CR and a CRLF.
                    insert_text: Some("fully\rQualified\r\nName".into()),
                    ..Default::default()
                },
            ])))
        })
        .next()
        .await;
    let completions = completions
        .await
        .unwrap()
        .into_iter()
        .flat_map(|response| response.completions)
        .collect::<Vec<_>>();
    assert_eq!(completions.len(), 1);
    // Both "\r" and "\r\n" collapse to "\n" in the applied text.
    assert_eq!(completions[0].new_text, "fully\nQualified\nName");
}
3680
// End-to-end flow for a code action that carries a *command* rather than an
// edit: resolve the action, execute its command, and capture the workspace
// edit the server sends back as a single undoable project transaction.
#[gpui::test(iterations = 10)]
async fn test_apply_code_actions_with_commands(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Options(
                    // `resolve_provider: true` is required so the editor asks
                    // the server to resolve the action before applying it.
                    lsp::CodeActionOptions {
                        resolve_provider: Some(true),
                        ..lsp::CodeActionOptions::default()
                    },
                )),
                execute_command_provider: Some(lsp::ExecuteCommandOptions {
                    commands: vec!["_the/command".to_string()],
                    ..lsp::ExecuteCommandOptions::default()
                }),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_language_servers.next().await.unwrap();

    // Language server returns code actions that contain commands, and not edits.
    let actions = project.update(cx, |project, cx| {
        project.code_actions(&buffer, 0..0, None, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "The code action".into(),
                    data: Some(serde_json::json!({
                        "command": "_the/command",
                    })),
                    ..lsp::CodeAction::default()
                }),
                // A second, data-less action is also returned.
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "two".into(),
                    ..lsp::CodeAction::default()
                }),
            ]))
        })
        .next()
        .await;

    // Apply the first action (the one carrying `data`).
    let action = actions.await.unwrap().unwrap()[0].clone();
    let apply = project.update(cx, |project, cx| {
        project.apply_code_action(buffer.clone(), action, true, cx)
    });

    // Resolving the code action does not populate its edits. In absence of
    // edits, we must execute the given command.
    fake_server.set_request_handler::<lsp::request::CodeActionResolveRequest, _, _>(
        |mut action, _| async move {
            if action.data.is_some() {
                action.command = Some(lsp::Command {
                    title: "The command".into(),
                    command: "_the/command".into(),
                    arguments: Some(vec![json!("the-argument")]),
                });
            }
            Ok(action)
        },
    );

    // While executing the command, the language server sends the editor
    // a `workspaceEdit` request.
    fake_server
        .set_request_handler::<lsp::request::ExecuteCommand, _, _>({
            let fake = fake_server.clone();
            move |params, _| {
                assert_eq!(params.command, "_the/command");
                let fake = fake.clone();
                async move {
                    // Server-initiated `workspace/applyEdit`: insert "X" at
                    // the start of a.ts.
                    fake.server
                        .request::<lsp::request::ApplyWorkspaceEdit>(
                            lsp::ApplyWorkspaceEditParams {
                                label: None,
                                edit: lsp::WorkspaceEdit {
                                    changes: Some(
                                        [(
                                            lsp::Uri::from_file_path(path!("/dir/a.ts")).unwrap(),
                                            vec![lsp::TextEdit {
                                                range: lsp::Range::new(
                                                    lsp::Position::new(0, 0),
                                                    lsp::Position::new(0, 0),
                                                ),
                                                new_text: "X".into(),
                                            }],
                                        )]
                                        .into_iter()
                                        .collect(),
                                    ),
                                    ..Default::default()
                                },
                            },
                        )
                        .await
                        .into_response()
                        .unwrap();
                    Ok(Some(json!(null)))
                }
            }
        })
        .next()
        .await;

    // Applying the code action returns a project transaction containing the edits
    // sent by the language server in its `workspaceEdit` request.
    let transaction = apply.await.unwrap();
    assert!(transaction.0.contains_key(&buffer));
    buffer.update(cx, |buffer, cx| {
        assert_eq!(buffer.text(), "Xa");
        // The whole applied edit is undoable as one step.
        buffer.undo(cx);
        assert_eq!(buffer.text(), "a");
    });
}
3822
// `rename_entry` must create any missing intermediate directories when the
// destination path is deep, and must also work when moving a file into an
// already-existing directory — in both cases preserving file contents.
#[gpui::test]
async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    let expected_contents = "content";
    fs.as_fake()
        .insert_tree(
            "/root",
            json!({
                "test.txt": expected_contents
            }),
        )
        .await;

    let project = Project::test(fs, [path!("/root").as_ref()], cx).await;

    let (worktree, entry_id) = project.read_with(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry_id = worktree
            .read(cx)
            .entry_for_path(rel_path("test.txt"))
            .unwrap()
            .id;
        (worktree, entry_id)
    });
    let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id());
    // Move into a directory hierarchy that does not exist yet.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/dir3/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_some(),
            "Whole directory hierarchy and the new file should have been created"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );

    // Look up the entry id again: the move may have assigned a new one.
    let entry_id = worktree.read_with(cx, |worktree, _| {
        worktree
            .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
            .unwrap()
            .id
    });

    // Second rename: move up into `dir1/dir2`, which already exists.
    let _result = project
        .update(cx, |project, cx| {
            project.rename_entry(
                entry_id,
                (worktree_id, rel_path("dir1/dir2/test.txt")).into(),
                cx,
            )
        })
        .await
        .unwrap();
    worktree.read_with(cx, |worktree, _| {
        assert!(
            worktree.entry_for_path(rel_path("test.txt")).is_none(),
            "First file should not reappear"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/dir3/test.txt"))
                .is_none(),
            "Old file should have been removed"
        );
        assert!(
            worktree
                .entry_for_path(rel_path("dir1/dir2/test.txt"))
                .is_some(),
            "No error should have occurred after moving into existing directory"
        );
    });
    assert_eq!(
        worktree
            .update(cx, |worktree, cx| {
                worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
            })
            .await
            .unwrap()
            .text,
        expected_contents,
        "Moved file's contents should be preserved"
    );
}
3930
3931#[gpui::test(iterations = 10)]
3932async fn test_save_file(cx: &mut gpui::TestAppContext) {
3933 init_test(cx);
3934
3935 let fs = FakeFs::new(cx.executor());
3936 fs.insert_tree(
3937 path!("/dir"),
3938 json!({
3939 "file1": "the old contents",
3940 }),
3941 )
3942 .await;
3943
3944 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
3945 let buffer = project
3946 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
3947 .await
3948 .unwrap();
3949 buffer.update(cx, |buffer, cx| {
3950 assert_eq!(buffer.text(), "the old contents");
3951 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
3952 });
3953
3954 project
3955 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
3956 .await
3957 .unwrap();
3958
3959 let new_text = fs
3960 .load(Path::new(path!("/dir/file1")))
3961 .await
3962 .unwrap()
3963 .replace("\r\n", "\n");
3964 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
3965}
3966
// Regression test: saving a previously-untitled buffer under a path with a
// recognized extension (".rs") must start the matching language server and
// register the buffer with it.
#[gpui::test(iterations = 10)]
async fn test_save_file_spawns_language_server(cx: &mut gpui::TestAppContext) {
    // Issue: #24349
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(path!("/dir"), json!({})).await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());

    language_registry.add(rust_lang());
    let mut fake_rust_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            name: "the-rust-language-server",
            capabilities: lsp::ServerCapabilities {
                completion_provider: Some(lsp::CompletionOptions {
                    trigger_characters: Some(vec![".".to_string(), "::".to_string()]),
                    ..Default::default()
                }),
                text_document_sync: Some(lsp::TextDocumentSyncCapability::Options(
                    lsp::TextDocumentSyncOptions {
                        save: Some(lsp::TextDocumentSyncSaveOptions::Supported(true)),
                        ..Default::default()
                    },
                )),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Create an untitled buffer; with no file association there is no
    // language, hence no language server yet.
    let buffer = project
        .update(cx, |this, cx| this.create_buffer(false, cx))
        .unwrap()
        .await;
    project.update(cx, |this, cx| {
        this.register_buffer_with_language_servers(&buffer, cx);
        buffer.update(cx, |buffer, cx| {
            assert!(!this.has_language_servers_for(buffer, cx));
        })
    });

    // Save the buffer as a Rust file inside the worktree.
    project
        .update(cx, |this, cx| {
            let worktree_id = this.worktrees(cx).next().unwrap().read(cx).id();
            this.save_buffer_as(
                buffer.clone(),
                ProjectPath {
                    worktree_id,
                    path: rel_path("file.rs").into(),
                },
                cx,
            )
        })
        .await
        .unwrap();
    // A server is started up, and it is notified about Rust files.
    let mut fake_rust_server = fake_rust_servers.next().await.unwrap();
    assert_eq!(
        fake_rust_server
            .receive_notification::<lsp::notification::DidOpenTextDocument>()
            .await
            .text_document,
        lsp::TextDocumentItem {
            uri: lsp::Uri::from_file_path(path!("/dir/file.rs")).unwrap(),
            version: 0,
            text: "".to_string(),
            language_id: "rust".to_string(),
        }
    );

    // The buffer is now served by the newly started language server.
    project.update(cx, |this, cx| {
        buffer.update(cx, |buffer, cx| {
            assert!(this.has_language_servers_for(buffer, cx));
        })
    });
}
4046
4047#[gpui::test(iterations = 30)]
4048async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext) {
4049 init_test(cx);
4050
4051 let fs = FakeFs::new(cx.executor());
4052 fs.insert_tree(
4053 path!("/dir"),
4054 json!({
4055 "file1": "the original contents",
4056 }),
4057 )
4058 .await;
4059
4060 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4061 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4062 let buffer = project
4063 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4064 .await
4065 .unwrap();
4066
4067 // Simulate buffer diffs being slow, so that they don't complete before
4068 // the next file change occurs.
4069 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4070
4071 // Change the buffer's file on disk, and then wait for the file change
4072 // to be detected by the worktree, so that the buffer starts reloading.
4073 fs.save(
4074 path!("/dir/file1").as_ref(),
4075 &Rope::from_str("the first contents", cx.background_executor()),
4076 Default::default(),
4077 )
4078 .await
4079 .unwrap();
4080 worktree.next_event(cx).await;
4081
4082 // Change the buffer's file again. Depending on the random seed, the
4083 // previous file change may still be in progress.
4084 fs.save(
4085 path!("/dir/file1").as_ref(),
4086 &Rope::from_str("the second contents", cx.background_executor()),
4087 Default::default(),
4088 )
4089 .await
4090 .unwrap();
4091 worktree.next_event(cx).await;
4092
4093 cx.executor().run_until_parked();
4094 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4095 buffer.read_with(cx, |buffer, _| {
4096 assert_eq!(buffer.text(), on_disk_text);
4097 assert!(!buffer.is_dirty(), "buffer should not be dirty");
4098 assert!(!buffer.has_conflict(), "buffer should not be dirty");
4099 });
4100}
4101
4102#[gpui::test(iterations = 30)]
4103async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
4104 init_test(cx);
4105
4106 let fs = FakeFs::new(cx.executor());
4107 fs.insert_tree(
4108 path!("/dir"),
4109 json!({
4110 "file1": "the original contents",
4111 }),
4112 )
4113 .await;
4114
4115 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4116 let worktree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
4117 let buffer = project
4118 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4119 .await
4120 .unwrap();
4121
4122 // Simulate buffer diffs being slow, so that they don't complete before
4123 // the next file change occurs.
4124 cx.executor().deprioritize(*language::BUFFER_DIFF_TASK);
4125
4126 // Change the buffer's file on disk, and then wait for the file change
4127 // to be detected by the worktree, so that the buffer starts reloading.
4128 fs.save(
4129 path!("/dir/file1").as_ref(),
4130 &Rope::from_str("the first contents", cx.background_executor()),
4131 Default::default(),
4132 )
4133 .await
4134 .unwrap();
4135 worktree.next_event(cx).await;
4136
4137 cx.executor()
4138 .spawn(cx.executor().simulate_random_delay())
4139 .await;
4140
4141 // Perform a noop edit, causing the buffer's version to increase.
4142 buffer.update(cx, |buffer, cx| {
4143 buffer.edit([(0..0, " ")], None, cx);
4144 buffer.undo(cx);
4145 });
4146
4147 cx.executor().run_until_parked();
4148 let on_disk_text = fs.load(Path::new(path!("/dir/file1"))).await.unwrap();
4149 buffer.read_with(cx, |buffer, _| {
4150 let buffer_text = buffer.text();
4151 if buffer_text == on_disk_text {
4152 assert!(
4153 !buffer.is_dirty() && !buffer.has_conflict(),
4154 "buffer shouldn't be dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}",
4155 );
4156 }
4157 // If the file change occurred while the buffer was processing the first
4158 // change, the buffer will be in a conflicting state.
4159 else {
4160 assert!(buffer.is_dirty(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4161 assert!(buffer.has_conflict(), "buffer should report that it is dirty. text: {buffer_text:?}, disk text: {on_disk_text:?}");
4162 }
4163 });
4164}
4165
4166#[gpui::test]
4167async fn test_save_in_single_file_worktree(cx: &mut gpui::TestAppContext) {
4168 init_test(cx);
4169
4170 let fs = FakeFs::new(cx.executor());
4171 fs.insert_tree(
4172 path!("/dir"),
4173 json!({
4174 "file1": "the old contents",
4175 }),
4176 )
4177 .await;
4178
4179 let project = Project::test(fs.clone(), [path!("/dir/file1").as_ref()], cx).await;
4180 let buffer = project
4181 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4182 .await
4183 .unwrap();
4184 buffer.update(cx, |buffer, cx| {
4185 buffer.edit([(0..0, "a line of text.\n".repeat(10 * 1024))], None, cx);
4186 });
4187
4188 project
4189 .update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
4190 .await
4191 .unwrap();
4192
4193 let new_text = fs
4194 .load(Path::new(path!("/dir/file1")))
4195 .await
4196 .unwrap()
4197 .replace("\r\n", "\n");
4198 assert_eq!(new_text, buffer.update(cx, |buffer, _| buffer.text()));
4199}
4200
// "Save as" on a brand-new buffer: the file is written to disk, the buffer
// becomes clean, its language is re-detected from the new extension, and
// re-opening the same path yields the very same buffer handle.
#[gpui::test]
async fn test_save_as(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree("/dir", json!({})).await;

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let languages = project.update(cx, |project, _| project.languages().clone());
    languages.add(rust_lang());

    // An untitled buffer starts out as Plain Text.
    let buffer = project.update(cx, |project, cx| {
        project.create_local_buffer("", None, false, cx)
    });
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "abc")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
        assert_eq!(buffer.language().unwrap().name(), "Plain Text".into());
    });
    project
        .update(cx, |project, cx| {
            let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
            let path = ProjectPath {
                worktree_id,
                path: rel_path("file1.rs").into(),
            };
            project.save_buffer_as(buffer.clone(), path, cx)
        })
        .await
        .unwrap();
    assert_eq!(fs.load(Path::new("/dir/file1.rs")).await.unwrap(), "abc");

    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, cx| {
        assert_eq!(
            buffer.file().unwrap().full_path(cx),
            Path::new("dir/file1.rs")
        );
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
        // The ".rs" extension causes the language to switch to Rust.
        assert_eq!(buffer.language().unwrap().name(), "Rust".into());
    });

    // Opening the saved path must return the existing buffer, not a copy.
    let opened_buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file1.rs", cx)
        })
        .await
        .unwrap();
    assert_eq!(opened_buffer, buffer);
}
4254
4255#[gpui::test]
4256async fn test_save_as_existing_file(cx: &mut gpui::TestAppContext) {
4257 init_test(cx);
4258
4259 let fs = FakeFs::new(cx.executor());
4260 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4261
4262 fs.insert_tree(
4263 path!("/dir"),
4264 json!({
4265 "data_a.txt": "data about a"
4266 }),
4267 )
4268 .await;
4269
4270 let buffer = project
4271 .update(cx, |project, cx| {
4272 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4273 })
4274 .await
4275 .unwrap();
4276
4277 buffer.update(cx, |buffer, cx| {
4278 buffer.edit([(11..12, "b")], None, cx);
4279 });
4280
4281 // Save buffer's contents as a new file and confirm that the buffer's now
4282 // associated with `data_b.txt` instead of `data_a.txt`, confirming that the
4283 // file associated with the buffer has now been updated to `data_b.txt`
4284 project
4285 .update(cx, |project, cx| {
4286 let worktree_id = project.worktrees(cx).next().unwrap().read(cx).id();
4287 let new_path = ProjectPath {
4288 worktree_id,
4289 path: rel_path("data_b.txt").into(),
4290 };
4291
4292 project.save_buffer_as(buffer.clone(), new_path, cx)
4293 })
4294 .await
4295 .unwrap();
4296
4297 buffer.update(cx, |buffer, cx| {
4298 assert_eq!(
4299 buffer.file().unwrap().full_path(cx),
4300 Path::new("dir/data_b.txt")
4301 )
4302 });
4303
4304 // Open the original `data_a.txt` file, confirming that its contents are
4305 // unchanged and the resulting buffer's associated file is `data_a.txt`.
4306 let original_buffer = project
4307 .update(cx, |project, cx| {
4308 project.open_local_buffer(path!("/dir/data_a.txt"), cx)
4309 })
4310 .await
4311 .unwrap();
4312
4313 original_buffer.update(cx, |buffer, cx| {
4314 assert_eq!(buffer.text(), "data about a");
4315 assert_eq!(
4316 buffer.file().unwrap().full_path(cx),
4317 Path::new("dir/data_a.txt")
4318 )
4319 });
4320}
4321
// Uses the real filesystem (TempTree + RealFs) to rename/delete files while
// buffers are open, then verifies that (a) open buffers track the renames,
// (b) deleted files are reported as such, and (c) replaying the recorded
// update stream brings a remote worktree copy into the same state.
#[gpui::test(retries = 5)]
async fn test_rescan_and_remote_updates(cx: &mut gpui::TestAppContext) {
    use worktree::WorktreeModelHandle as _;

    init_test(cx);
    // Real-FS watching blocks; the test executor must allow it.
    cx.executor().allow_parking();

    let dir = TempTree::new(json!({
        "a": {
            "file1": "",
            "file2": "",
            "file3": "",
        },
        "b": {
            "c": {
                "file4": "",
                "file5": "",
            }
        }
    }));

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [dir.path()], cx).await;

    let buffer_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        let buffer = project.update(cx, |p, cx| p.open_local_buffer(dir.path().join(path), cx));
        async move { buffer.await.unwrap() }
    };
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let buffer2 = buffer_for_path("a/file2", cx).await;
    let buffer3 = buffer_for_path("a/file3", cx).await;
    let buffer4 = buffer_for_path("b/c/file4", cx).await;
    let buffer5 = buffer_for_path("b/c/file5", cx).await;

    let file2_id = id_for_path("a/file2", cx);
    let file3_id = id_for_path("a/file3", cx);
    let file4_id = id_for_path("b/c/file4", cx);

    // Create a remote copy of this worktree.
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let metadata = tree.update(cx, |tree, _| tree.metadata_proto());

    // Record every update the local worktree emits, for replay below.
    let updates = Arc::new(Mutex::new(Vec::new()));
    tree.update(cx, |tree, cx| {
        let updates = updates.clone();
        tree.observe_updates(0, cx, move |update| {
            updates.lock().push(update);
            async { true }
        });
    });

    let remote = cx.update(|cx| {
        Worktree::remote(
            0,
            ReplicaId::REMOTE_SERVER,
            metadata,
            project.read(cx).client().into(),
            project.read(cx).path_style(cx),
            cx,
        )
    });

    cx.executor().run_until_parked();

    cx.update(|cx| {
        assert!(!buffer2.read(cx).is_dirty());
        assert!(!buffer3.read(cx).is_dirty());
        assert!(!buffer4.read(cx).is_dirty());
        assert!(!buffer5.read(cx).is_dirty());
    });

    // Rename and delete files and directories.
    tree.flush_fs_events(cx).await;
    std::fs::rename(dir.path().join("a/file3"), dir.path().join("b/c/file3")).unwrap();
    std::fs::remove_file(dir.path().join("b/c/file5")).unwrap();
    std::fs::rename(dir.path().join("b/c"), dir.path().join("d")).unwrap();
    std::fs::rename(dir.path().join("a/file2"), dir.path().join("a/file2.new")).unwrap();
    tree.flush_fs_events(cx).await;

    cx.update(|app| {
        assert_eq!(
            tree.read(app).paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });

    // Entry ids survive renames (including renames of parent directories).
    assert_eq!(id_for_path("a/file2.new", cx), file2_id);
    assert_eq!(id_for_path("d/file3", cx), file3_id);
    assert_eq!(id_for_path("d/file4", cx), file4_id);

    cx.update(|cx| {
        // Open buffers follow their files to the new paths...
        assert_eq!(
            buffer2.read(cx).file().unwrap().path().as_ref(),
            rel_path("a/file2.new")
        );
        assert_eq!(
            buffer3.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file3")
        );
        assert_eq!(
            buffer4.read(cx).file().unwrap().path().as_ref(),
            rel_path("d/file4")
        );
        // ...while a deleted file keeps its old path but reports Deleted.
        assert_eq!(
            buffer5.read(cx).file().unwrap().path().as_ref(),
            rel_path("b/c/file5")
        );

        assert_matches!(
            buffer2.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer3.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_matches!(
            buffer4.read(cx).file().unwrap().disk_state(),
            DiskState::Present { .. }
        );
        assert_eq!(
            buffer5.read(cx).file().unwrap().disk_state(),
            DiskState::Deleted
        );
    });

    // Update the remote worktree. Check that it becomes consistent with the
    // local worktree.
    cx.executor().run_until_parked();

    remote.update(cx, |remote, _| {
        for update in updates.lock().drain(..) {
            remote.as_remote_mut().unwrap().update_from_remote(update);
        }
    });
    cx.executor().run_until_parked();
    remote.update(cx, |remote, _| {
        assert_eq!(
            remote.paths().collect::<Vec<_>>(),
            vec![
                rel_path("a"),
                rel_path("a/file1"),
                rel_path("a/file2.new"),
                rel_path("b"),
                rel_path("d"),
                rel_path("d/file3"),
                rel_path("d/file4"),
            ]
        );
    });
}
4489
// Renaming a directory must preserve the project entry ids of the directory
// and of the files inside it, and buffers open against those files must stay
// clean — a rename is not an edit.
#[gpui::test(iterations = 10)]
async fn test_buffer_identity_across_renames(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a": {
                "file1": "",
            }
        }),
    )
    .await;

    let project = Project::test(fs, [Path::new(path!("/dir"))], cx).await;
    let tree = project.update(cx, |project, cx| project.worktrees(cx).next().unwrap());
    let tree_id = tree.update(cx, |tree, _| tree.id());

    // Helper: resolve a worktree path to its entry id, panicking when the
    // entry does not exist.
    let id_for_path = |path: &'static str, cx: &mut gpui::TestAppContext| {
        project.update(cx, |project, cx| {
            let tree = project.worktrees(cx).next().unwrap();
            tree.read(cx)
                .entry_for_path(rel_path(path))
                .unwrap_or_else(|| panic!("no entry for path {}", path))
                .id
        })
    };

    let dir_id = id_for_path("a", cx);
    let file_id = id_for_path("a/file1", cx);
    let buffer = project
        .update(cx, |p, cx| {
            p.open_buffer((tree_id, rel_path("a/file1")), cx)
        })
        .await
        .unwrap();
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));

    // Rename the directory "a" to "b" through the project API and wait for
    // the worktree to settle.
    project
        .update(cx, |project, cx| {
            project.rename_entry(dir_id, (tree_id, rel_path("b")).into(), cx)
        })
        .unwrap()
        .await
        .into_included()
        .unwrap();
    cx.executor().run_until_parked();

    // The entry ids survive the rename, and the open buffer is still clean.
    assert_eq!(id_for_path("b", cx), dir_id);
    assert_eq!(id_for_path("b/file1", cx), file_id);
    buffer.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
}
4543
4544#[gpui::test]
4545async fn test_buffer_deduping(cx: &mut gpui::TestAppContext) {
4546 init_test(cx);
4547
4548 let fs = FakeFs::new(cx.executor());
4549 fs.insert_tree(
4550 "/dir",
4551 json!({
4552 "a.txt": "a-contents",
4553 "b.txt": "b-contents",
4554 }),
4555 )
4556 .await;
4557
4558 let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
4559
4560 // Spawn multiple tasks to open paths, repeating some paths.
4561 let (buffer_a_1, buffer_b, buffer_a_2) = project.update(cx, |p, cx| {
4562 (
4563 p.open_local_buffer("/dir/a.txt", cx),
4564 p.open_local_buffer("/dir/b.txt", cx),
4565 p.open_local_buffer("/dir/a.txt", cx),
4566 )
4567 });
4568
4569 let buffer_a_1 = buffer_a_1.await.unwrap();
4570 let buffer_a_2 = buffer_a_2.await.unwrap();
4571 let buffer_b = buffer_b.await.unwrap();
4572 assert_eq!(buffer_a_1.update(cx, |b, _| b.text()), "a-contents");
4573 assert_eq!(buffer_b.update(cx, |b, _| b.text()), "b-contents");
4574
4575 // There is only one buffer per path.
4576 let buffer_a_id = buffer_a_1.entity_id();
4577 assert_eq!(buffer_a_2.entity_id(), buffer_a_id);
4578
4579 // Open the same path again while it is still open.
4580 drop(buffer_a_1);
4581 let buffer_a_3 = project
4582 .update(cx, |p, cx| p.open_local_buffer("/dir/a.txt", cx))
4583 .await
4584 .unwrap();
4585
4586 // There's still only one buffer per path.
4587 assert_eq!(buffer_a_3.entity_id(), buffer_a_id);
4588}
4589
// Exercises the buffer dirty-state machine end-to-end: edits dirty a buffer,
// saving cleans it, restoring the saved content cleans it again, and file
// deletion interacts with dirtiness without emitting spurious events. The
// exact event sequences emitted by the buffer are asserted throughout.
#[gpui::test]
async fn test_buffer_is_dirty(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "file1": "abc",
            "file2": "def",
            "file3": "ghi",
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let buffer1 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
        .await
        .unwrap();
    // Collects every non-operation buffer event so the exact sequence can be
    // asserted below.
    let events = Arc::new(Mutex::new(Vec::new()));

    // initially, the buffer isn't dirty.
    buffer1.update(cx, |buffer, cx| {
        cx.subscribe(&buffer1, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();

        assert!(!buffer.is_dirty());
        assert!(events.lock().is_empty());

        buffer.edit([(1..2, "")], None, cx);
    });

    // after the first edit, the buffer is dirty, and emits a dirtied event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "ac");
        assert!(buffer.is_dirty());
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged
            ]
        );
        events.lock().clear();
        // Simulate a save by acknowledging the current version and the
        // file's current mtime.
        buffer.did_save(
            buffer.version(),
            buffer.file().unwrap().disk_state().mtime(),
            cx,
        );
    });

    // after saving, the buffer is not dirty, and emits a saved event.
    buffer1.update(cx, |buffer, cx| {
        assert!(!buffer.is_dirty());
        assert_eq!(*events.lock(), &[language::BufferEvent::Saved]);
        events.lock().clear();

        buffer.edit([(1..1, "B")], None, cx);
        buffer.edit([(2..2, "D")], None, cx);
    });

    // after editing again, the buffer is dirty, and emits another dirty event.
    buffer1.update(cx, |buffer, cx| {
        assert!(buffer.text() == "aBDc");
        assert!(buffer.is_dirty());
        // Only the first of the two edits flips the dirty flag, so only one
        // DirtyChanged is expected between the two Edited events.
        assert_eq!(
            *events.lock(),
            &[
                language::BufferEvent::Edited,
                language::BufferEvent::DirtyChanged,
                language::BufferEvent::Edited,
            ],
        );
        events.lock().clear();

        // After restoring the buffer to its previously-saved state,
        // the buffer is not considered dirty anymore.
        buffer.edit([(1..3, "")], None, cx);
        assert!(buffer.text() == "ac");
        assert!(!buffer.is_dirty());
    });

    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is deleted, it is not considered dirty.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer2 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
        .await
        .unwrap();
    buffer2.update(cx, |_, cx| {
        cx.subscribe(&buffer2, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    fs.remove_file(path!("/dir/file2").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    buffer2.update(cx, |buffer, _| assert!(!buffer.is_dirty()));
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[language::BufferEvent::FileHandleChanged]
    );

    // Buffer becomes dirty when edited.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(2..3, "")], None, cx);
        assert_eq!(buffer.is_dirty(), true);
    });
    assert_eq!(
        mem::take(&mut *events.lock()),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // Buffer becomes clean again when all of its content is removed, because
    // the file was deleted.
    buffer2.update(cx, |buffer, cx| {
        buffer.edit([(0..2, "")], None, cx);
        assert_eq!(buffer.is_empty(), true);
        assert_eq!(buffer.is_dirty(), false);
    });
    assert_eq!(
        *events.lock(),
        &[
            language::BufferEvent::Edited,
            language::BufferEvent::DirtyChanged
        ]
    );

    // When a file is already dirty when deleted, we don't emit a Dirtied event.
    let events = Arc::new(Mutex::new(Vec::new()));
    let buffer3 = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file3"), cx))
        .await
        .unwrap();
    buffer3.update(cx, |_, cx| {
        cx.subscribe(&buffer3, {
            let events = events.clone();
            move |_, _, event, _| match event {
                BufferEvent::Operation { .. } => {}
                _ => events.lock().push(event.clone()),
            }
        })
        .detach();
    });

    // Dirty the buffer first, then delete its file on disk.
    buffer3.update(cx, |buffer, cx| {
        buffer.edit([(0..0, "x")], None, cx);
    });
    events.lock().clear();
    fs.remove_file(path!("/dir/file3").as_ref(), Default::default())
        .await
        .unwrap();
    cx.executor().run_until_parked();
    assert_eq!(*events.lock(), &[language::BufferEvent::FileHandleChanged]);
    cx.update(|cx| assert!(buffer3.read(cx).is_dirty()));
}
4771
// Verifies how an open buffer reacts to its file changing on disk: a clean
// buffer is reloaded in place (with anchors kept aligned across the diff of
// old and new contents), while a dirty buffer is left untouched and flagged
// as conflicted instead.
#[gpui::test]
async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let (initial_contents, initial_offsets) =
        marked_text_offsets("one twoˇ\nthree ˇfourˇ five\nsixˇ seven\n");
    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "the-file": initial_contents,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/the-file"), cx))
        .await
        .unwrap();

    // Place an anchor at each marked offset so we can check that anchors
    // survive the on-disk reload below.
    let anchors = initial_offsets
        .iter()
        .map(|offset| buffer.update(cx, |b, _| b.anchor_before(offset)))
        .collect::<Vec<_>>();

    // Precondition: the freshly-opened buffer is clean and conflict-free.
    buffer.update(cx, |buffer, _| {
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk, modifying the words around the anchors.
    let (new_contents, new_offsets) =
        marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
    fs.save(
        path!("/dir/the-file").as_ref(),
        &Rope::from_str(new_contents.as_str(), cx.background_executor()),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer was not modified, it is reloaded from disk. Its
    // contents are edited according to the diff between the old and new
    // file contents.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), new_contents);
        assert!(!buffer.is_dirty());
        assert!(!buffer.has_conflict());

        // Each anchor must land on the offset marked in the new contents.
        let anchor_offsets = anchors
            .iter()
            .map(|anchor| anchor.to_offset(&*buffer))
            .collect::<Vec<_>>();
        assert_eq!(anchor_offsets, new_offsets);
    });

    // Modify the buffer
    buffer.update(cx, |buffer, cx| {
        buffer.edit([(0..0, " ")], None, cx);
        assert!(buffer.is_dirty());
        assert!(!buffer.has_conflict());
    });

    // Change the file on disk again, adding blank lines to the beginning.
    fs.save(
        path!("/dir/the-file").as_ref(),
        &Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()),
        LineEnding::Unix,
    )
    .await
    .unwrap();

    // Because the buffer is modified, it doesn't reload from disk, but is
    // marked as having a conflict.
    cx.executor().run_until_parked();
    buffer.update(cx, |buffer, _| {
        assert_eq!(buffer.text(), " ".to_string() + &new_contents);
        assert!(buffer.has_conflict());
    });
}
4854
4855#[gpui::test]
4856async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
4857 init_test(cx);
4858
4859 let fs = FakeFs::new(cx.executor());
4860 fs.insert_tree(
4861 path!("/dir"),
4862 json!({
4863 "file1": "a\nb\nc\n",
4864 "file2": "one\r\ntwo\r\nthree\r\n",
4865 }),
4866 )
4867 .await;
4868
4869 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
4870 let buffer1 = project
4871 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file1"), cx))
4872 .await
4873 .unwrap();
4874 let buffer2 = project
4875 .update(cx, |p, cx| p.open_local_buffer(path!("/dir/file2"), cx))
4876 .await
4877 .unwrap();
4878
4879 buffer1.update(cx, |buffer, _| {
4880 assert_eq!(buffer.text(), "a\nb\nc\n");
4881 assert_eq!(buffer.line_ending(), LineEnding::Unix);
4882 });
4883 buffer2.update(cx, |buffer, _| {
4884 assert_eq!(buffer.text(), "one\ntwo\nthree\n");
4885 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4886 });
4887
4888 // Change a file's line endings on disk from unix to windows. The buffer's
4889 // state updates correctly.
4890 fs.save(
4891 path!("/dir/file1").as_ref(),
4892 &Rope::from_str("aaa\nb\nc\n", cx.background_executor()),
4893 LineEnding::Windows,
4894 )
4895 .await
4896 .unwrap();
4897 cx.executor().run_until_parked();
4898 buffer1.update(cx, |buffer, _| {
4899 assert_eq!(buffer.text(), "aaa\nb\nc\n");
4900 assert_eq!(buffer.line_ending(), LineEnding::Windows);
4901 });
4902
4903 // Save a file with windows line endings. The file is written correctly.
4904 buffer2.update(cx, |buffer, cx| {
4905 buffer.set_text("one\ntwo\nthree\nfour\n", cx);
4906 });
4907 project
4908 .update(cx, |project, cx| project.save_buffer(buffer2, cx))
4909 .await
4910 .unwrap();
4911 assert_eq!(
4912 fs.load(path!("/dir/file2").as_ref()).await.unwrap(),
4913 "one\r\ntwo\r\nthree\r\nfour\r\n",
4914 );
4915}
4916
// Publishes LSP diagnostics whose related-information links tie hints to
// their primary diagnostics, and verifies that the buffer groups them:
// group 0 is "error 2" plus its two hints, group 1 is "error 1" plus its
// single hint, and entries are returned ordered by position.
#[gpui::test]
async fn test_grouped_diagnostics(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.rs": "
                fn foo(mut v: Vec<usize>) {
                    for x in &v {
                        v.push(1);
                    }
                }
            "
            .unindent(),
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
    let buffer = project
        .update(cx, |p, cx| p.open_local_buffer(path!("/dir/a.rs"), cx))
        .await
        .unwrap();

    let buffer_uri = Uri::from_file_path(path!("/dir/a.rs")).unwrap();
    let message = lsp::PublishDiagnosticsParams {
        uri: buffer_uri.clone(),
        diagnostics: vec![
            // Primary diagnostic "error 1", linked to one hint.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::WARNING),
                message: "error 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "error 1 hint 1".to_string(),
                }]),
                ..Default::default()
            },
            // The hint for "error 1", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 1 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Primary diagnostic "error 2", linked to two hints.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                severity: Some(DiagnosticSeverity::ERROR),
                message: "error 2".to_string(),
                related_information: Some(vec![
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 1".to_string(),
                    },
                    lsp::DiagnosticRelatedInformation {
                        location: lsp::Location {
                            uri: buffer_uri.clone(),
                            range: lsp::Range::new(
                                lsp::Position::new(1, 13),
                                lsp::Position::new(1, 15),
                            ),
                        },
                        message: "error 2 hint 2".to_string(),
                    },
                ]),
                ..Default::default()
            },
            // First hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 1".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri.clone(),
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
            // Second hint for "error 2", pointing back at its primary.
            lsp::Diagnostic {
                range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 15)),
                severity: Some(DiagnosticSeverity::HINT),
                message: "error 2 hint 2".to_string(),
                related_information: Some(vec![lsp::DiagnosticRelatedInformation {
                    location: lsp::Location {
                        uri: buffer_uri,
                        range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 17)),
                    },
                    message: "original diagnostic".to_string(),
                }]),
                ..Default::default()
            },
        ],
        version: None,
    };

    lsp_store
        .update(cx, |lsp_store, cx| {
            lsp_store.update_diagnostics(
                LanguageServerId(0),
                message,
                None,
                DiagnosticSourceKind::Pushed,
                &[],
                cx,
            )
        })
        .unwrap();
    let buffer = buffer.update(cx, |buffer, _| buffer.snapshot());

    // All entries, ordered by position; each hint carries the group id of
    // its primary diagnostic.
    assert_eq!(
        buffer
            .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
            .collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 0: "error 2" and its two hints.
    assert_eq!(
        buffer.diagnostic_group::<Point>(0).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 1".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 13)..Point::new(1, 15),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 2 hint 2".to_string(),
                    group_id: 0,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(2, 8)..Point::new(2, 17),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::ERROR,
                    message: "error 2".to_string(),
                    group_id: 0,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            }
        ]
    );

    // Group 1: "error 1" and its single hint.
    assert_eq!(
        buffer.diagnostic_group::<Point>(1).collect::<Vec<_>>(),
        &[
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::WARNING,
                    message: "error 1".to_string(),
                    group_id: 1,
                    is_primary: true,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
            DiagnosticEntry {
                range: Point::new(1, 8)..Point::new(1, 9),
                diagnostic: Diagnostic {
                    severity: DiagnosticSeverity::HINT,
                    message: "error 1 hint 1".to_string(),
                    group_id: 1,
                    is_primary: false,
                    source_kind: DiagnosticSourceKind::Pushed,
                    ..Diagnostic::default()
                }
            },
        ]
    );
}
5176
// Verifies that renaming a file through the project sends
// `workspace/willRenameFiles` to a language server that registered matching
// file-operation filters, applies the workspace edit the server returns, and
// afterwards notifies it with `workspace/didRenameFiles`.
#[gpui::test]
async fn test_lsp_rename_notifications(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two": {
                "two.rs": "const TWO: usize = one::ONE + one::ONE;"
            }

        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    // Filters covering *.rs files and any folder, matching the rename below.
    let watched_paths = lsp::FileOperationRegistrationOptions {
        filters: vec![
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/*.rs".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::File),
                    options: None,
                },
            },
            FileOperationFilter {
                scheme: Some("file".to_owned()),
                pattern: lsp::FileOperationPattern {
                    glob: "**/**".to_owned(),
                    matches: Some(lsp::FileOperationPatternKind::Folder),
                    options: None,
                },
            },
        ],
    };
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                workspace: Some(lsp::WorkspaceServerCapabilities {
                    workspace_folders: None,
                    file_operations: Some(lsp::WorkspaceFileOperationsServerCapabilities {
                        did_rename: Some(watched_paths.clone()),
                        will_rename: Some(watched_paths),
                        ..Default::default()
                    }),
                }),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    // Open a buffer so the fake language server gets started.
    let _ = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();
    // Kick off the rename of one.rs -> three.rs.
    let response = project.update(cx, |project, cx| {
        let worktree = project.worktrees(cx).next().unwrap();
        let entry = worktree
            .read(cx)
            .entry_for_path(rel_path("one.rs"))
            .unwrap();
        project.rename_entry(
            entry.id,
            (worktree.read(cx).id(), rel_path("three.rs")).into(),
            cx,
        )
    });
    // The edit the server will return from willRenameFiles; the project is
    // expected to apply it as part of the rename.
    let expected_edit = lsp::WorkspaceEdit {
        changes: None,
        document_changes: Some(DocumentChanges::Edits({
            vec![TextDocumentEdit {
                edits: vec![lsp::Edit::Plain(lsp::TextEdit {
                    range: lsp::Range {
                        start: lsp::Position {
                            line: 0,
                            character: 1,
                        },
                        end: lsp::Position {
                            line: 0,
                            character: 3,
                        },
                    },
                    new_text: "This is not a drill".to_owned(),
                })],
                text_document: lsp::OptionalVersionedTextDocumentIdentifier {
                    uri: Uri::from_str(uri!("file:///dir/two/two.rs")).unwrap(),
                    version: Some(1337),
                },
            }]
        })),
        change_annotations: None,
    };
    // Records the edit handed out by the willRenameFiles handler so we can
    // confirm the request actually fired.
    let resolved_workspace_edit = Arc::new(OnceLock::new());
    fake_server
        .set_request_handler::<WillRenameFiles, _, _>({
            let resolved_workspace_edit = resolved_workspace_edit.clone();
            let expected_edit = expected_edit.clone();
            move |params, _| {
                let resolved_workspace_edit = resolved_workspace_edit.clone();
                let expected_edit = expected_edit.clone();
                async move {
                    assert_eq!(params.files.len(), 1);
                    assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
                    assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
                    resolved_workspace_edit.set(expected_edit.clone()).unwrap();
                    Ok(Some(expected_edit))
                }
            }
        })
        .next()
        .await
        .unwrap();
    let _ = response.await.unwrap();
    // After the rename completes, the server must receive didRenameFiles
    // with the same old/new URIs.
    fake_server
        .handle_notification::<DidRenameFiles, _>(|params, _| {
            assert_eq!(params.files.len(), 1);
            assert_eq!(params.files[0].old_uri, uri!("file:///dir/one.rs"));
            assert_eq!(params.files[0].new_uri, uri!("file:///dir/three.rs"));
        })
        .next()
        .await
        .unwrap();
    assert_eq!(resolved_workspace_edit.get(), Some(&expected_edit));
}
5312
// Verifies LSP symbol rename: `textDocument/prepareRename` resolves the
// renameable range, and `textDocument/rename` returns a multi-file workspace
// edit that the project applies to the affected buffers.
#[gpui::test]
async fn test_rename(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;"
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(rust_lang());
    let mut fake_servers = language_registry.register_fake_lsp(
        "Rust",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                rename_provider: Some(lsp::OneOf::Right(lsp::RenameOptions {
                    prepare_provider: Some(true),
                    work_done_progress_options: Default::default(),
                })),
                ..Default::default()
            },
            ..Default::default()
        },
    );

    let (buffer, _handle) = project
        .update(cx, |project, cx| {
            project.open_local_buffer_with_lsp(path!("/dir/one.rs"), cx)
        })
        .await
        .unwrap();

    let fake_server = fake_servers.next().await.unwrap();

    // prepareRename at offset 7 (inside "ONE") should yield the symbol's
    // full range, 6..9.
    let response = project.update(cx, |project, cx| {
        project.prepare_rename(buffer.clone(), 7, cx)
    });
    fake_server
        .set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(params.position, lsp::Position::new(0, 7));
            Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
                lsp::Position::new(0, 6),
                lsp::Position::new(0, 9),
            ))))
        })
        .next()
        .await
        .unwrap();
    let response = response.await.unwrap();
    let PrepareRenameResponse::Success(range) = response else {
        panic!("{:?}", response);
    };
    let range = buffer.update(cx, |buffer, _| range.to_offset(buffer));
    assert_eq!(range, 6..9);

    // Perform the rename; the fake server responds with edits touching both
    // one.rs (the definition) and two.rs (the two references).
    let response = project.update(cx, |project, cx| {
        project.perform_rename(buffer.clone(), 7, "THREE".to_string(), cx)
    });
    fake_server
        .set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
            assert_eq!(
                params.text_document_position.text_document.uri.as_str(),
                uri!("file:///dir/one.rs")
            );
            assert_eq!(
                params.text_document_position.position,
                lsp::Position::new(0, 7)
            );
            assert_eq!(params.new_name, "THREE");
            Ok(Some(lsp::WorkspaceEdit {
                changes: Some(
                    [
                        (
                            lsp::Uri::from_file_path(path!("/dir/one.rs")).unwrap(),
                            vec![lsp::TextEdit::new(
                                lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
                                "THREE".to_string(),
                            )],
                        ),
                        (
                            lsp::Uri::from_file_path(path!("/dir/two.rs")).unwrap(),
                            vec![
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 24),
                                        lsp::Position::new(0, 27),
                                    ),
                                    "THREE".to_string(),
                                ),
                                lsp::TextEdit::new(
                                    lsp::Range::new(
                                        lsp::Position::new(0, 35),
                                        lsp::Position::new(0, 38),
                                    ),
                                    "THREE".to_string(),
                                ),
                            ],
                        ),
                    ]
                    .into_iter()
                    .collect(),
                ),
                ..Default::default()
            }))
        })
        .next()
        .await
        .unwrap();
    // The resulting transaction covers both buffers, each with the rename
    // applied.
    let mut transaction = response.await.unwrap().0;
    assert_eq!(transaction.len(), 2);
    assert_eq!(
        transaction
            .remove_entry(&buffer)
            .unwrap()
            .0
            .update(cx, |buffer, _| buffer.text()),
        "const THREE: usize = 1;"
    );
    assert_eq!(
        transaction
            .into_keys()
            .next()
            .unwrap()
            .update(cx, |buffer, _| buffer.text()),
        "const TWO: usize = one::THREE + one::THREE;"
    );
}
5452
// Project-wide text search returns per-file byte ranges of matches, and it
// searches the in-memory contents of open buffers — including unsaved edits —
// rather than only what is on disk.
#[gpui::test]
async fn test_search(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "const ONE: usize = 1;",
            "two.rs": "const TWO: usize = one::ONE + one::ONE;",
            "three.rs": "const THREE: usize = one::ONE + two::TWO;",
            "four.rs": "const FOUR: usize = one::ONE + three::THREE;",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    // "TWO" appears in two.rs (its own const) and three.rs (a reference).
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40])
        ])
    );

    // Edit four.rs in memory (without saving) so that it now mentions TWO.
    let buffer_4 = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/four.rs"), cx)
        })
        .await
        .unwrap();
    buffer_4.update(cx, |buffer, cx| {
        let text = "two::TWO";
        buffer.edit([(20..28, text), (31..43, text)], None, cx);
    });

    // The search now also reports the matches from the unsaved buffer.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "TWO",
                false,
                true,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.rs").to_string(), vec![6..9]),
            (path!("dir/three.rs").to_string(), vec![37..40]),
            (path!("dir/four.rs").to_string(), vec![25..28, 36..39])
        ])
    );
}
5529
// Search inclusion filters restrict results to files matching any of the
// given globs; globs that match nothing are harmless as long as at least one
// glob matches, and no matching glob at all yields an empty result.
#[gpui::test]
async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let search_query = "file";

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": r#"// Rust file one"#,
            "one.ts": r#"// TypeScript file one"#,
            "two.rs": r#"// Rust file two"#,
            "two.ts": r#"// TypeScript file two"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    assert!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap()
        .is_empty(),
        "If no inclusions match, no files should be returned"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
                Default::default(),
                false,
                None
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust only search should give only Rust files"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
                    .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.ts").to_string(), vec![14..18]),
        ]),
        "TypeScript only search should give only TypeScript files, even if other inclusions don't match anything"
    );

    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                search_query,
                false,
                true,
                false,
                PathMatcher::new(
                    &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
                    PathStyle::local()
                )
                .unwrap(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/two.ts").to_string(), vec![14..18]),
            (path!("dir/one.rs").to_string(), vec![8..12]),
            (path!("dir/one.ts").to_string(), vec![14..18]),
            (path!("dir/two.rs").to_string(), vec![8..12]),
        ]),
        "Rust and typescript search should give both Rust and TypeScript files, even if other inclusions don't match anything"
    );
}
5653
5654#[gpui::test]
5655async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
5656 init_test(cx);
5657
5658 let search_query = "file";
5659
5660 let fs = FakeFs::new(cx.executor());
5661 fs.insert_tree(
5662 path!("/dir"),
5663 json!({
5664 "one.rs": r#"// Rust file one"#,
5665 "one.ts": r#"// TypeScript file one"#,
5666 "two.rs": r#"// Rust file two"#,
5667 "two.ts": r#"// TypeScript file two"#,
5668 }),
5669 )
5670 .await;
5671 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5672
5673 assert_eq!(
5674 search(
5675 &project,
5676 SearchQuery::text(
5677 search_query,
5678 false,
5679 true,
5680 false,
5681 Default::default(),
5682 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5683 false,
5684 None,
5685 )
5686 .unwrap(),
5687 cx
5688 )
5689 .await
5690 .unwrap(),
5691 HashMap::from_iter([
5692 (path!("dir/one.rs").to_string(), vec![8..12]),
5693 (path!("dir/one.ts").to_string(), vec![14..18]),
5694 (path!("dir/two.rs").to_string(), vec![8..12]),
5695 (path!("dir/two.ts").to_string(), vec![14..18]),
5696 ]),
5697 "If no exclusions match, all files should be returned"
5698 );
5699
5700 assert_eq!(
5701 search(
5702 &project,
5703 SearchQuery::text(
5704 search_query,
5705 false,
5706 true,
5707 false,
5708 Default::default(),
5709 PathMatcher::new(&["*.rs".to_owned()], PathStyle::local()).unwrap(),
5710 false,
5711 None,
5712 )
5713 .unwrap(),
5714 cx
5715 )
5716 .await
5717 .unwrap(),
5718 HashMap::from_iter([
5719 (path!("dir/one.ts").to_string(), vec![14..18]),
5720 (path!("dir/two.ts").to_string(), vec![14..18]),
5721 ]),
5722 "Rust exclusion search should give only TypeScript files"
5723 );
5724
5725 assert_eq!(
5726 search(
5727 &project,
5728 SearchQuery::text(
5729 search_query,
5730 false,
5731 true,
5732 false,
5733 Default::default(),
5734 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5735 .unwrap(),
5736 false,
5737 None,
5738 )
5739 .unwrap(),
5740 cx
5741 )
5742 .await
5743 .unwrap(),
5744 HashMap::from_iter([
5745 (path!("dir/one.rs").to_string(), vec![8..12]),
5746 (path!("dir/two.rs").to_string(), vec![8..12]),
5747 ]),
5748 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5749 );
5750
5751 assert!(
5752 search(
5753 &project,
5754 SearchQuery::text(
5755 search_query,
5756 false,
5757 true,
5758 false,
5759 Default::default(),
5760 PathMatcher::new(
5761 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5762 PathStyle::local(),
5763 )
5764 .unwrap(),
5765 false,
5766 None,
5767 )
5768 .unwrap(),
5769 cx
5770 )
5771 .await
5772 .unwrap()
5773 .is_empty(),
5774 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5775 );
5776}
5777
5778#[gpui::test]
5779async fn test_search_with_buffer_exclusions(cx: &mut gpui::TestAppContext) {
5780 init_test(cx);
5781
5782 let search_query = "file";
5783
5784 let fs = FakeFs::new(cx.executor());
5785 fs.insert_tree(
5786 path!("/dir"),
5787 json!({
5788 "one.rs": r#"// Rust file one"#,
5789 "one.ts": r#"// TypeScript file one"#,
5790 "two.rs": r#"// Rust file two"#,
5791 "two.ts": r#"// TypeScript file two"#,
5792 }),
5793 )
5794 .await;
5795
5796 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5797 let path_style = PathStyle::local();
5798 let _buffer = project.update(cx, |project, cx| {
5799 project.create_local_buffer("file", None, false, cx)
5800 });
5801
5802 assert_eq!(
5803 search(
5804 &project,
5805 SearchQuery::text(
5806 search_query,
5807 false,
5808 true,
5809 false,
5810 Default::default(),
5811 PathMatcher::new(&["*.odd".to_owned()], path_style).unwrap(),
5812 false,
5813 None,
5814 )
5815 .unwrap(),
5816 cx
5817 )
5818 .await
5819 .unwrap(),
5820 HashMap::from_iter([
5821 (path!("dir/one.rs").to_string(), vec![8..12]),
5822 (path!("dir/one.ts").to_string(), vec![14..18]),
5823 (path!("dir/two.rs").to_string(), vec![8..12]),
5824 (path!("dir/two.ts").to_string(), vec![14..18]),
5825 ]),
5826 "If no exclusions match, all files should be returned"
5827 );
5828
5829 assert_eq!(
5830 search(
5831 &project,
5832 SearchQuery::text(
5833 search_query,
5834 false,
5835 true,
5836 false,
5837 Default::default(),
5838 PathMatcher::new(&["*.rs".to_owned()], path_style).unwrap(),
5839 false,
5840 None,
5841 )
5842 .unwrap(),
5843 cx
5844 )
5845 .await
5846 .unwrap(),
5847 HashMap::from_iter([
5848 (path!("dir/one.ts").to_string(), vec![14..18]),
5849 (path!("dir/two.ts").to_string(), vec![14..18]),
5850 ]),
5851 "Rust exclusion search should give only TypeScript files"
5852 );
5853
5854 assert_eq!(
5855 search(
5856 &project,
5857 SearchQuery::text(
5858 search_query,
5859 false,
5860 true,
5861 false,
5862 Default::default(),
5863 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], path_style).unwrap(),
5864 false,
5865 None,
5866 )
5867 .unwrap(),
5868 cx
5869 )
5870 .await
5871 .unwrap(),
5872 HashMap::from_iter([
5873 (path!("dir/one.rs").to_string(), vec![8..12]),
5874 (path!("dir/two.rs").to_string(), vec![8..12]),
5875 ]),
5876 "TypeScript exclusion search should give only Rust files, even if other exclusions don't match anything"
5877 );
5878
5879 assert!(
5880 search(
5881 &project,
5882 SearchQuery::text(
5883 search_query,
5884 false,
5885 true,
5886 false,
5887 Default::default(),
5888 PathMatcher::new(
5889 &["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()],
5890 PathStyle::local(),
5891 )
5892 .unwrap(),
5893 false,
5894 None,
5895 )
5896 .unwrap(),
5897 cx
5898 )
5899 .await
5900 .unwrap()
5901 .is_empty(),
5902 "Rust and typescript exclusion should give no files, even if other exclusions don't match anything"
5903 );
5904}
5905
5906#[gpui::test]
5907async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContext) {
5908 init_test(cx);
5909
5910 let search_query = "file";
5911
5912 let fs = FakeFs::new(cx.executor());
5913 fs.insert_tree(
5914 path!("/dir"),
5915 json!({
5916 "one.rs": r#"// Rust file one"#,
5917 "one.ts": r#"// TypeScript file one"#,
5918 "two.rs": r#"// Rust file two"#,
5919 "two.ts": r#"// TypeScript file two"#,
5920 }),
5921 )
5922 .await;
5923 let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
5924 assert!(
5925 search(
5926 &project,
5927 SearchQuery::text(
5928 search_query,
5929 false,
5930 true,
5931 false,
5932 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5933 PathMatcher::new(&["*.odd".to_owned()], PathStyle::local()).unwrap(),
5934 false,
5935 None,
5936 )
5937 .unwrap(),
5938 cx
5939 )
5940 .await
5941 .unwrap()
5942 .is_empty(),
5943 "If both no exclusions and inclusions match, exclusions should win and return nothing"
5944 );
5945
5946 assert!(
5947 search(
5948 &project,
5949 SearchQuery::text(
5950 search_query,
5951 false,
5952 true,
5953 false,
5954 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5955 PathMatcher::new(&["*.ts".to_owned()], PathStyle::local()).unwrap(),
5956 false,
5957 None,
5958 )
5959 .unwrap(),
5960 cx
5961 )
5962 .await
5963 .unwrap()
5964 .is_empty(),
5965 "If both TypeScript exclusions and inclusions match, exclusions should win and return nothing files."
5966 );
5967
5968 assert!(
5969 search(
5970 &project,
5971 SearchQuery::text(
5972 search_query,
5973 false,
5974 true,
5975 false,
5976 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5977 .unwrap(),
5978 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
5979 .unwrap(),
5980 false,
5981 None,
5982 )
5983 .unwrap(),
5984 cx
5985 )
5986 .await
5987 .unwrap()
5988 .is_empty(),
5989 "Non-matching inclusions and exclusions should not change that."
5990 );
5991
5992 assert_eq!(
5993 search(
5994 &project,
5995 SearchQuery::text(
5996 search_query,
5997 false,
5998 true,
5999 false,
6000 PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()], PathStyle::local())
6001 .unwrap(),
6002 PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()], PathStyle::local())
6003 .unwrap(),
6004 false,
6005 None,
6006 )
6007 .unwrap(),
6008 cx
6009 )
6010 .await
6011 .unwrap(),
6012 HashMap::from_iter([
6013 (path!("dir/one.ts").to_string(), vec![14..18]),
6014 (path!("dir/two.ts").to_string(), vec![14..18]),
6015 ]),
6016 "Non-intersecting TypeScript inclusions and Rust exclusions should return TypeScript files"
6017 );
6018}
6019
6020#[gpui::test]
6021async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppContext) {
6022 init_test(cx);
6023
6024 let fs = FakeFs::new(cx.executor());
6025 fs.insert_tree(
6026 path!("/worktree-a"),
6027 json!({
6028 "haystack.rs": r#"// NEEDLE"#,
6029 "haystack.ts": r#"// NEEDLE"#,
6030 }),
6031 )
6032 .await;
6033 fs.insert_tree(
6034 path!("/worktree-b"),
6035 json!({
6036 "haystack.rs": r#"// NEEDLE"#,
6037 "haystack.ts": r#"// NEEDLE"#,
6038 }),
6039 )
6040 .await;
6041
6042 let path_style = PathStyle::local();
6043 let project = Project::test(
6044 fs.clone(),
6045 [path!("/worktree-a").as_ref(), path!("/worktree-b").as_ref()],
6046 cx,
6047 )
6048 .await;
6049
6050 assert_eq!(
6051 search(
6052 &project,
6053 SearchQuery::text(
6054 "NEEDLE",
6055 false,
6056 true,
6057 false,
6058 PathMatcher::new(&["worktree-a/*.rs".to_owned()], path_style).unwrap(),
6059 Default::default(),
6060 true,
6061 None,
6062 )
6063 .unwrap(),
6064 cx
6065 )
6066 .await
6067 .unwrap(),
6068 HashMap::from_iter([(path!("worktree-a/haystack.rs").to_string(), vec![3..9])]),
6069 "should only return results from included worktree"
6070 );
6071 assert_eq!(
6072 search(
6073 &project,
6074 SearchQuery::text(
6075 "NEEDLE",
6076 false,
6077 true,
6078 false,
6079 PathMatcher::new(&["worktree-b/*.rs".to_owned()], path_style).unwrap(),
6080 Default::default(),
6081 true,
6082 None,
6083 )
6084 .unwrap(),
6085 cx
6086 )
6087 .await
6088 .unwrap(),
6089 HashMap::from_iter([(path!("worktree-b/haystack.rs").to_string(), vec![3..9])]),
6090 "should only return results from included worktree"
6091 );
6092
6093 assert_eq!(
6094 search(
6095 &project,
6096 SearchQuery::text(
6097 "NEEDLE",
6098 false,
6099 true,
6100 false,
6101 PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap(),
6102 Default::default(),
6103 false,
6104 None,
6105 )
6106 .unwrap(),
6107 cx
6108 )
6109 .await
6110 .unwrap(),
6111 HashMap::from_iter([
6112 (path!("worktree-a/haystack.ts").to_string(), vec![3..9]),
6113 (path!("worktree-b/haystack.ts").to_string(), vec![3..9])
6114 ]),
6115 "should return results from both worktrees"
6116 );
6117}
6118
// Verifies that project search respects .gitignore: ignored files are skipped
// by default, all files are searched when the include-ignored flag is set, and
// include/exclude matchers still narrow an ignored-files search.
#[gpui::test]
async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.background_executor.clone());
    // A repo whose .gitignore hides both `target/` and `node_modules/`; every
    // file in the tree contains the substring "key".
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            ".gitignore": "**/target\n/node_modules\n",
            "target": {
                "index.txt": "index_key:index_value"
            },
            "node_modules": {
                "eslint": {
                    "index.ts": "const eslint_key = 'eslint value'",
                    "package.json": r#"{ "some_key": "some value" }"#,
                },
                "prettier": {
                    "index.ts": "const prettier_key = 'prettier value'",
                    "package.json": r#"{ "other_key": "other value" }"#,
                },
            },
            "package.json": r#"{ "main_key": "main value" }"#,
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

    let query = "key";
    // Default query (fourth flag false): ignored directories are skipped, so
    // only the top-level package.json is searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/package.json").to_string(), vec![8..11])]),
        "Only one non-ignored file should have the query"
    );

    // A fresh project is built for each query — presumably to avoid reusing
    // worktree/search state between runs; TODO confirm whether that's required.
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let path_style = PathStyle::local();
    // Same query with the include-ignored flag (fourth argument) set to true:
    // every file in the tree should now be searched.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([
            (path!("dir/package.json").to_string(), vec![8..11]),
            (path!("dir/target/index.txt").to_string(), vec![6..9]),
            (
                path!("dir/node_modules/prettier/package.json").to_string(),
                vec![9..12]
            ),
            (
                path!("dir/node_modules/prettier/index.ts").to_string(),
                vec![15..18]
            ),
            (
                path!("dir/node_modules/eslint/index.ts").to_string(),
                vec![13..16]
            ),
            (
                path!("dir/node_modules/eslint/package.json").to_string(),
                vec![8..11]
            ),
        ]),
        "Unrestricted search with ignored directories should find every file with the query"
    );

    // Include-ignored search further narrowed by matchers: only the prettier
    // directory is included and TypeScript files are excluded, leaving exactly
    // one candidate file.
    let files_to_include =
        PathMatcher::new(&["node_modules/prettier/**".to_owned()], path_style).unwrap();
    let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()], path_style).unwrap();
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                query,
                false,
                false,
                true,
                files_to_include,
                files_to_exclude,
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(
            path!("dir/node_modules/prettier/package.json").to_string(),
            vec![9..12]
        )]),
        "With search including ignored prettier directory and excluding TS files, only one file should be found"
    );
}
6243
// Verifies search over multi-byte (Cyrillic) text. Match ranges are byte
// offsets: each Cyrillic letter is 2 bytes in UTF-8, so "привет" spans 12
// bytes. Also pins which query variant (Text vs Regex) unicode queries become.
#[gpui::test]
async fn test_search_with_unicode(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "one.rs": "// ПРИВЕТ? привет!",
            "two.rs": "// ПРИВЕТ.",
            "three.rs": "// привет",
        }),
    )
    .await;
    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let unicode_case_sensitive_query = SearchQuery::text(
        "привет",
        false,
        true,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // A case-sensitive unicode query stays a plain text query...
    assert_matches!(unicode_case_sensitive_query, Ok(SearchQuery::Text { .. }));
    assert_eq!(
        search(&project, unicode_case_sensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            // "// ПРИВЕТ? " is 17 bytes (3 + 12 + 1 + 1), so the lowercase
            // match in one.rs starts at byte 17.
            (path!("dir/one.rs").to_string(), vec![17..29]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    let unicode_case_insensitive_query = SearchQuery::text(
        "привет",
        false,
        false,
        false,
        Default::default(),
        Default::default(),
        false,
        None,
    );
    // ...while a case-insensitive unicode query is lowered to a regex query.
    assert_matches!(
        unicode_case_insensitive_query,
        Ok(SearchQuery::Regex { .. })
    );
    // Case-insensitively, the uppercase "ПРИВЕТ" occurrences match too.
    assert_eq!(
        search(&project, unicode_case_insensitive_query.unwrap(), cx)
            .await
            .unwrap(),
        HashMap::from_iter([
            (path!("dir/one.rs").to_string(), vec![3..15, 17..29]),
            (path!("dir/two.rs").to_string(), vec![3..15]),
            (path!("dir/three.rs").to_string(), vec![3..15]),
        ])
    );

    // The trailing '.' must match literally despite the regex lowering —
    // otherwise "привет!" in one.rs would also match. Only two.rs qualifies.
    assert_eq!(
        search(
            &project,
            SearchQuery::text(
                "привет.",
                false,
                false,
                false,
                Default::default(),
                Default::default(),
                false,
                None,
            )
            .unwrap(),
            cx
        )
        .await
        .unwrap(),
        HashMap::from_iter([(path!("dir/two.rs").to_string(), vec![3..16]),])
    );
}
6326
6327#[gpui::test]
6328async fn test_create_entry(cx: &mut gpui::TestAppContext) {
6329 init_test(cx);
6330
6331 let fs = FakeFs::new(cx.executor());
6332 fs.insert_tree(
6333 "/one/two",
6334 json!({
6335 "three": {
6336 "a.txt": "",
6337 "four": {}
6338 },
6339 "c.rs": ""
6340 }),
6341 )
6342 .await;
6343
6344 let project = Project::test(fs.clone(), ["/one/two/three".as_ref()], cx).await;
6345 project
6346 .update(cx, |project, cx| {
6347 let id = project.worktrees(cx).next().unwrap().read(cx).id();
6348 project.create_entry((id, rel_path("b..")), true, cx)
6349 })
6350 .await
6351 .unwrap()
6352 .into_included()
6353 .unwrap();
6354
6355 assert_eq!(
6356 fs.paths(true),
6357 vec![
6358 PathBuf::from(path!("/")),
6359 PathBuf::from(path!("/one")),
6360 PathBuf::from(path!("/one/two")),
6361 PathBuf::from(path!("/one/two/c.rs")),
6362 PathBuf::from(path!("/one/two/three")),
6363 PathBuf::from(path!("/one/two/three/a.txt")),
6364 PathBuf::from(path!("/one/two/three/b..")),
6365 PathBuf::from(path!("/one/two/three/four")),
6366 ]
6367 );
6368}
6369
// Verifies that a hover request fans out to every language server registered
// for the buffer's language that advertises hover support, and that the merged
// result contains only non-empty responses: a server answering `None`
// contributes nothing, and a server without the capability is never queried.
#[gpui::test]
async fn test_multiple_language_server_hovers(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.tsx": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(tsx_lang());
    let language_server_names = [
        "TypeScriptServer",
        "TailwindServer",
        "ESLintServer",
        "NoHoverCapabilitiesServer",
    ];
    // Four fake servers for the same language: the first three advertise hover
    // support, the last one does not.
    let mut language_servers = [
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[0],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[1],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[2],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
        language_registry.register_fake_lsp(
            "tsx",
            FakeLspAdapter {
                name: language_server_names[3],
                capabilities: lsp::ServerCapabilities {
                    hover_provider: None,
                    ..lsp::ServerCapabilities::default()
                },
                ..FakeLspAdapter::default()
            },
        ),
    ];

    // Opening the buffer starts all registered servers for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    // Wait for each fake server to start and install a hover handler suited to
    // its role; keep the handler streams so we can later confirm each capable
    // server actually received a request.
    let mut servers_with_hover_requests = HashMap::default();
    for i in 0..language_server_names.len() {
        let new_server = language_servers[i].next().await.unwrap_or_else(|| {
            panic!(
                "Failed to get language server #{i} with name {}",
                &language_server_names[i]
            )
        });
        let new_server_name = new_server.server.name();
        assert!(
            !servers_with_hover_requests.contains_key(&new_server_name),
            "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
        );
        match new_server_name.as_ref() {
            // These two return real hover content derived from their names.
            "TailwindServer" | "TypeScriptServer" => {
                servers_with_hover_requests.insert(
                    new_server_name.clone(),
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        move |_, _| {
                            let name = new_server_name.clone();
                            async move {
                                Ok(Some(lsp::Hover {
                                    contents: lsp::HoverContents::Scalar(
                                        lsp::MarkedString::String(format!("{name} hover")),
                                    ),
                                    range: None,
                                }))
                            }
                        },
                    ),
                );
            }
            // ESLint is queried but reports no hover content.
            "ESLintServer" => {
                servers_with_hover_requests.insert(
                    new_server_name,
                    new_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
                        |_, _| async move { Ok(None) },
                    ),
                );
            }
            // This server lacks the hover capability, so its handler must
            // never run — panic if it does.
            "NoHoverCapabilitiesServer" => {
                let _never_handled = new_server
                    .set_request_handler::<lsp::request::HoverRequest, _, _>(|_, _| async move {
                        panic!(
                            "Should not call for hovers server with no corresponding capabilities"
                        )
                    });
            }
            unexpected => panic!("Unexpected server name: {unexpected}"),
        }
    }

    // Kick off the hover first, then drive every capable server's handler to
    // completion — each must have been triggered exactly once.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
        |mut hover_request| async move {
            hover_request
                .next()
                .await
                .expect("All hover requests should have been triggered")
        },
    ))
    .await;
    // Only the two servers that returned content appear in the merged result;
    // ESLint's `None` response is dropped.
    assert_eq!(
        vec!["TailwindServer hover", "TypeScriptServer hover"],
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Should receive hover responses from all related servers with hover capabilities"
    );
}
6524
// Verifies that hover content consisting solely of empty or whitespace-only
// parts is discarded entirely instead of producing a blank hover popup.
#[gpui::test]
async fn test_hovers_with_empty_parts(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server replies with hover parts that are all effectively empty:
    // an empty string, whitespace, and bare newlines.
    let mut request_handled = fake_server.set_request_handler::<lsp::request::HoverRequest, _, _>(
        move |_, _| async move {
            Ok(Some(lsp::Hover {
                contents: lsp::HoverContents::Array(vec![
                    lsp::MarkedString::String("".to_string()),
                    lsp::MarkedString::String(" ".to_string()),
                    lsp::MarkedString::String("\n\n\n".to_string()),
                ]),
                range: None,
            }))
        },
    );

    // Kick off the hover, then make sure the server's handler actually ran.
    let hover_task = project.update(cx, |project, cx| {
        project.hover(&buffer, Point::new(0, 0), cx)
    });
    let () = request_handled
        .next()
        .await
        .expect("All hover requests should have been triggered");
    // Despite the server responding, no hover content should survive.
    assert_eq!(
        Vec::<String>::new(),
        hover_task
            .await
            .into_iter()
            .flatten()
            .map(|hover| hover.contents.iter().map(|block| &block.text).join("|"))
            .sorted()
            .collect::<Vec<_>>(),
        "Empty hover parts should be ignored"
    );
}
6598
// Verifies that requesting code actions with an explicit `kinds` filter
// returns only actions of the requested kinds, even when the server offers
// more.
#[gpui::test]
async fn test_code_actions_only_kinds(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/dir"),
        json!({
            "a.ts": "a",
        }),
    )
    .await;

    let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;

    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    language_registry.add(typescript_lang());
    let mut fake_language_servers = language_registry.register_fake_lsp(
        "TypeScript",
        FakeLspAdapter {
            capabilities: lsp::ServerCapabilities {
                code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
                ..lsp::ServerCapabilities::default()
            },
            ..FakeLspAdapter::default()
        },
    );

    // Opening the buffer starts the fake server for its language.
    let (buffer, _handle) = project
        .update(cx, |p, cx| {
            p.open_local_buffer_with_lsp(path!("/dir/a.ts"), cx)
        })
        .await
        .unwrap();
    cx.executor().run_until_parked();

    let fake_server = fake_language_servers
        .next()
        .await
        .expect("failed to get the language server");

    // The server offers two actions of different kinds; only one kind is
    // requested below.
    let mut request_handled = fake_server
        .set_request_handler::<lsp::request::CodeActionRequest, _, _>(move |_, _| async move {
            Ok(Some(vec![
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "organize imports".to_string(),
                    kind: Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS),
                    ..lsp::CodeAction::default()
                }),
                lsp::CodeActionOrCommand::CodeAction(lsp::CodeAction {
                    title: "fix code".to_string(),
                    kind: Some(CodeActionKind::SOURCE_FIX_ALL),
                    ..lsp::CodeAction::default()
                }),
            ]))
        });

    // Request only SOURCE_ORGANIZE_IMPORTS actions over the whole buffer.
    let code_actions_task = project.update(cx, |project, cx| {
        project.code_actions(
            &buffer,
            0..buffer.read(cx).len(),
            Some(vec![CodeActionKind::SOURCE_ORGANIZE_IMPORTS]),
            cx,
        )
    });

    // Make sure the server's handler actually ran before inspecting results.
    let () = request_handled
        .next()
        .await
        .expect("The code action request should have been triggered");

    // Only the organize-imports action survives the kind filter.
    let code_actions = code_actions_task.await.unwrap().unwrap();
    assert_eq!(code_actions.len(), 1);
    assert_eq!(
        code_actions[0].lsp_action.action_kind(),
        Some(CodeActionKind::SOURCE_ORGANIZE_IMPORTS)
    );
}
6677
6678#[gpui::test]
6679async fn test_multiple_language_server_actions(cx: &mut gpui::TestAppContext) {
6680 init_test(cx);
6681
6682 let fs = FakeFs::new(cx.executor());
6683 fs.insert_tree(
6684 path!("/dir"),
6685 json!({
6686 "a.tsx": "a",
6687 }),
6688 )
6689 .await;
6690
6691 let project = Project::test(fs, [path!("/dir").as_ref()], cx).await;
6692
6693 let language_registry = project.read_with(cx, |project, _| project.languages().clone());
6694 language_registry.add(tsx_lang());
6695 let language_server_names = [
6696 "TypeScriptServer",
6697 "TailwindServer",
6698 "ESLintServer",
6699 "NoActionsCapabilitiesServer",
6700 ];
6701
6702 let mut language_server_rxs = [
6703 language_registry.register_fake_lsp(
6704 "tsx",
6705 FakeLspAdapter {
6706 name: language_server_names[0],
6707 capabilities: lsp::ServerCapabilities {
6708 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6709 ..lsp::ServerCapabilities::default()
6710 },
6711 ..FakeLspAdapter::default()
6712 },
6713 ),
6714 language_registry.register_fake_lsp(
6715 "tsx",
6716 FakeLspAdapter {
6717 name: language_server_names[1],
6718 capabilities: lsp::ServerCapabilities {
6719 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6720 ..lsp::ServerCapabilities::default()
6721 },
6722 ..FakeLspAdapter::default()
6723 },
6724 ),
6725 language_registry.register_fake_lsp(
6726 "tsx",
6727 FakeLspAdapter {
6728 name: language_server_names[2],
6729 capabilities: lsp::ServerCapabilities {
6730 code_action_provider: Some(lsp::CodeActionProviderCapability::Simple(true)),
6731 ..lsp::ServerCapabilities::default()
6732 },
6733 ..FakeLspAdapter::default()
6734 },
6735 ),
6736 language_registry.register_fake_lsp(
6737 "tsx",
6738 FakeLspAdapter {
6739 name: language_server_names[3],
6740 capabilities: lsp::ServerCapabilities {
6741 code_action_provider: None,
6742 ..lsp::ServerCapabilities::default()
6743 },
6744 ..FakeLspAdapter::default()
6745 },
6746 ),
6747 ];
6748
6749 let (buffer, _handle) = project
6750 .update(cx, |p, cx| {
6751 p.open_local_buffer_with_lsp(path!("/dir/a.tsx"), cx)
6752 })
6753 .await
6754 .unwrap();
6755 cx.executor().run_until_parked();
6756
6757 let mut servers_with_actions_requests = HashMap::default();
6758 for i in 0..language_server_names.len() {
6759 let new_server = language_server_rxs[i].next().await.unwrap_or_else(|| {
6760 panic!(
6761 "Failed to get language server #{i} with name {}",
6762 &language_server_names[i]
6763 )
6764 });
6765 let new_server_name = new_server.server.name();
6766
6767 assert!(
6768 !servers_with_actions_requests.contains_key(&new_server_name),
6769 "Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
6770 );
6771 match new_server_name.0.as_ref() {
6772 "TailwindServer" | "TypeScriptServer" => {
6773 servers_with_actions_requests.insert(
6774 new_server_name.clone(),
6775 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6776 move |_, _| {
6777 let name = new_server_name.clone();
6778 async move {
6779 Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
6780 lsp::CodeAction {
6781 title: format!("{name} code action"),
6782 ..lsp::CodeAction::default()
6783 },
6784 )]))
6785 }
6786 },
6787 ),
6788 );
6789 }
6790 "ESLintServer" => {
6791 servers_with_actions_requests.insert(
6792 new_server_name,
6793 new_server.set_request_handler::<lsp::request::CodeActionRequest, _, _>(
6794 |_, _| async move { Ok(None) },
6795 ),
6796 );
6797 }
6798 "NoActionsCapabilitiesServer" => {
6799 let _never_handled = new_server
6800 .set_request_handler::<lsp::request::CodeActionRequest, _, _>(|_, _| async move {
6801 panic!(
6802 "Should not call for code actions server with no corresponding capabilities"
6803 )
6804 });
6805 }
6806 unexpected => panic!("Unexpected server name: {unexpected}"),
6807 }
6808 }
6809
6810 let code_actions_task = project.update(cx, |project, cx| {
6811 project.code_actions(&buffer, 0..buffer.read(cx).len(), None, cx)
6812 });
6813
6814 // cx.run_until_parked();
6815 let _: Vec<()> = futures::future::join_all(servers_with_actions_requests.into_values().map(
6816 |mut code_actions_request| async move {
6817 code_actions_request
6818 .next()
6819 .await
6820 .expect("All code actions requests should have been triggered")
6821 },
6822 ))
6823 .await;
6824 assert_eq!(
6825 vec!["TailwindServer code action", "TypeScriptServer code action"],
6826 code_actions_task
6827 .await
6828 .unwrap()
6829 .unwrap()
6830 .into_iter()
6831 .map(|code_action| code_action.lsp_action.title().to_owned())
6832 .sorted()
6833 .collect::<Vec<_>>(),
6834 "Should receive code actions responses from all related servers with hover capabilities"
6835 );
6836}
6837
6838#[gpui::test]
6839async fn test_reordering_worktrees(cx: &mut gpui::TestAppContext) {
6840 init_test(cx);
6841
6842 let fs = FakeFs::new(cx.executor());
6843 fs.insert_tree(
6844 "/dir",
6845 json!({
6846 "a.rs": "let a = 1;",
6847 "b.rs": "let b = 2;",
6848 "c.rs": "let c = 2;",
6849 }),
6850 )
6851 .await;
6852
6853 let project = Project::test(
6854 fs,
6855 [
6856 "/dir/a.rs".as_ref(),
6857 "/dir/b.rs".as_ref(),
6858 "/dir/c.rs".as_ref(),
6859 ],
6860 cx,
6861 )
6862 .await;
6863
6864 // check the initial state and get the worktrees
6865 let (worktree_a, worktree_b, worktree_c) = project.update(cx, |project, cx| {
6866 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6867 assert_eq!(worktrees.len(), 3);
6868
6869 let worktree_a = worktrees[0].read(cx);
6870 let worktree_b = worktrees[1].read(cx);
6871 let worktree_c = worktrees[2].read(cx);
6872
6873 // check they start in the right order
6874 assert_eq!(worktree_a.abs_path().to_str().unwrap(), "/dir/a.rs");
6875 assert_eq!(worktree_b.abs_path().to_str().unwrap(), "/dir/b.rs");
6876 assert_eq!(worktree_c.abs_path().to_str().unwrap(), "/dir/c.rs");
6877
6878 (
6879 worktrees[0].clone(),
6880 worktrees[1].clone(),
6881 worktrees[2].clone(),
6882 )
6883 });
6884
6885 // move first worktree to after the second
6886 // [a, b, c] -> [b, a, c]
6887 project
6888 .update(cx, |project, cx| {
6889 let first = worktree_a.read(cx);
6890 let second = worktree_b.read(cx);
6891 project.move_worktree(first.id(), second.id(), cx)
6892 })
6893 .expect("moving first after second");
6894
6895 // check the state after moving
6896 project.update(cx, |project, cx| {
6897 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6898 assert_eq!(worktrees.len(), 3);
6899
6900 let first = worktrees[0].read(cx);
6901 let second = worktrees[1].read(cx);
6902 let third = worktrees[2].read(cx);
6903
6904 // check they are now in the right order
6905 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
6906 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/a.rs");
6907 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6908 });
6909
6910 // move the second worktree to before the first
6911 // [b, a, c] -> [a, b, c]
6912 project
6913 .update(cx, |project, cx| {
6914 let second = worktree_a.read(cx);
6915 let first = worktree_b.read(cx);
6916 project.move_worktree(first.id(), second.id(), cx)
6917 })
6918 .expect("moving second before first");
6919
6920 // check the state after moving
6921 project.update(cx, |project, cx| {
6922 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6923 assert_eq!(worktrees.len(), 3);
6924
6925 let first = worktrees[0].read(cx);
6926 let second = worktrees[1].read(cx);
6927 let third = worktrees[2].read(cx);
6928
6929 // check they are now in the right order
6930 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6931 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6932 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6933 });
6934
6935 // move the second worktree to after the third
6936 // [a, b, c] -> [a, c, b]
6937 project
6938 .update(cx, |project, cx| {
6939 let second = worktree_b.read(cx);
6940 let third = worktree_c.read(cx);
6941 project.move_worktree(second.id(), third.id(), cx)
6942 })
6943 .expect("moving second after third");
6944
6945 // check the state after moving
6946 project.update(cx, |project, cx| {
6947 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6948 assert_eq!(worktrees.len(), 3);
6949
6950 let first = worktrees[0].read(cx);
6951 let second = worktrees[1].read(cx);
6952 let third = worktrees[2].read(cx);
6953
6954 // check they are now in the right order
6955 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6956 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
6957 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/b.rs");
6958 });
6959
6960 // move the third worktree to before the second
6961 // [a, c, b] -> [a, b, c]
6962 project
6963 .update(cx, |project, cx| {
6964 let third = worktree_c.read(cx);
6965 let second = worktree_b.read(cx);
6966 project.move_worktree(third.id(), second.id(), cx)
6967 })
6968 .expect("moving third before second");
6969
6970 // check the state after moving
6971 project.update(cx, |project, cx| {
6972 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6973 assert_eq!(worktrees.len(), 3);
6974
6975 let first = worktrees[0].read(cx);
6976 let second = worktrees[1].read(cx);
6977 let third = worktrees[2].read(cx);
6978
6979 // check they are now in the right order
6980 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
6981 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
6982 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
6983 });
6984
6985 // move the first worktree to after the third
6986 // [a, b, c] -> [b, c, a]
6987 project
6988 .update(cx, |project, cx| {
6989 let first = worktree_a.read(cx);
6990 let third = worktree_c.read(cx);
6991 project.move_worktree(first.id(), third.id(), cx)
6992 })
6993 .expect("moving first after third");
6994
6995 // check the state after moving
6996 project.update(cx, |project, cx| {
6997 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
6998 assert_eq!(worktrees.len(), 3);
6999
7000 let first = worktrees[0].read(cx);
7001 let second = worktrees[1].read(cx);
7002 let third = worktrees[2].read(cx);
7003
7004 // check they are now in the right order
7005 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/b.rs");
7006 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/c.rs");
7007 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/a.rs");
7008 });
7009
7010 // move the third worktree to before the first
7011 // [b, c, a] -> [a, b, c]
7012 project
7013 .update(cx, |project, cx| {
7014 let third = worktree_a.read(cx);
7015 let first = worktree_b.read(cx);
7016 project.move_worktree(third.id(), first.id(), cx)
7017 })
7018 .expect("moving third before first");
7019
7020 // check the state after moving
7021 project.update(cx, |project, cx| {
7022 let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
7023 assert_eq!(worktrees.len(), 3);
7024
7025 let first = worktrees[0].read(cx);
7026 let second = worktrees[1].read(cx);
7027 let third = worktrees[2].read(cx);
7028
7029 // check they are now in the right order
7030 assert_eq!(first.abs_path().to_str().unwrap(), "/dir/a.rs");
7031 assert_eq!(second.abs_path().to_str().unwrap(), "/dir/b.rs");
7032 assert_eq!(third.abs_path().to_str().unwrap(), "/dir/c.rs");
7033 });
7034}
7035
// Verifies that an unstaged diff (working copy vs. index) reports the correct
// hunks, and that the hunks are recomputed when the index contents change.
#[gpui::test]
async fn test_unstaged_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // Index version: no leading comment, prints "hello world".
    let staged_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Working-copy version: adds a comment line and changes the printed text.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "main.rs": file_contents,
            }
        }),
    )
    .await;

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/main.rs", cx)
        })
        .await
        .unwrap();
    let unstaged_diff = project
        .update(cx, |project, cx| {
            project.open_unstaged_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // Wait for the initial diff computation, then expect one added hunk (the
    // comment line) and one modified hunk (the changed println! argument).
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks(&snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text_string().unwrap(),
            &[
                (0..1, "", "// print goodbye\n", DiffHunkStatus::added_none()),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Replace the index contents: it now has the comment but no println!.
    let staged_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();

    fs.set_index_for_repo(Path::new("/dir/.git"), &[("src/main.rs", staged_contents)]);

    // After the index change is observed, only the println! line should
    // remain, reported as a single added hunk.
    cx.run_until_parked();
    unstaged_diff.update(cx, |unstaged_diff, cx| {
        let snapshot = buffer.read(cx).snapshot();
        assert_hunks(
            unstaged_diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &unstaged_diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });
}
7127
// Verifies that an uncommitted diff (working copy vs. HEAD) tracks changes to
// both HEAD and the index — including a file deleted from the working copy —
// and that each hunk's secondary status reflects whether it is staged.
#[gpui::test]
async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    // HEAD: plain main that prints "hello world".
    let committed_contents = r#"
        fn main() {
            println!("hello world");
        }
    "#
    .unindent();
    // Index: the println! change is already staged.
    let staged_contents = r#"
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();
    // Working copy: additionally adds a comment line.
    let file_contents = r#"
        // print goodbye
        fn main() {
            println!("goodbye world");
        }
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "src": {
                "modification.rs": file_contents,
            }
        }),
    )
    .await;

    // `deletion.rs` exists in HEAD and the index but not on disk.
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", staged_contents),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
    let language_registry = project.read_with(cx, |project, _| project.languages().clone());
    let language = rust_lang();
    language_registry.add(language.clone());

    let buffer_1 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/modification.rs", cx)
        })
        .await
        .unwrap();
    let diff_1 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_1.clone(), cx)
        })
        .await
        .unwrap();
    // The diff's base text should pick up the buffer's language (Rust).
    diff_1.read_with(cx, |diff, _| {
        assert_eq!(diff.base_text().language().cloned(), Some(language))
    });
    cx.run_until_parked();
    // The added comment line is unstaged (it still has a secondary hunk),
    // while the println! modification was already staged above.
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..1,
                    "",
                    "// print goodbye\n",
                    DiffHunkStatus::added(DiffHunkSecondaryStatus::HasSecondaryHunk),
                ),
                (
                    2..3,
                    "    println!(\"hello world\");\n",
                    "    println!(\"goodbye world\");\n",
                    DiffHunkStatus::modified_none(),
                ),
            ],
        );
    });

    // Reset HEAD to a version that differs from both the buffer and the index.
    let committed_contents = r#"
        // print goodbye
        fn main() {
        }
    "#
    .unindent();
    fs.set_head_for_repo(
        Path::new("/dir/.git"),
        &[
            ("src/modification.rs", committed_contents.clone()),
            ("src/deletion.rs", "// the-deleted-contents\n".into()),
        ],
        "deadbeef",
    );

    // Buffer now has an unstaged hunk.
    cx.run_until_parked();
    diff_1.update(cx, |diff, cx| {
        let snapshot = buffer_1.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text().text(),
            &[(
                2..3,
                "",
                "    println!(\"goodbye world\");\n",
                DiffHunkStatus::added_none(),
            )],
        );
    });

    // Open a buffer for a file that's been deleted.
    let buffer_2 = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/src/deletion.rs", cx)
        })
        .await
        .unwrap();
    let diff_2 = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer_2.clone(), cx)
        })
        .await
        .unwrap();
    cx.run_until_parked();
    // The deletion is unstaged: the file is still present in the index.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::HasSecondaryHunk),
            )],
        );
    });

    // Stage the deletion of this file by removing it from the index.
    fs.set_index_for_repo(
        Path::new("/dir/.git"),
        &[("src/modification.rs", committed_contents.clone())],
    );
    cx.run_until_parked();
    // The deletion hunk no longer has a secondary (unstaged) hunk.
    diff_2.update(cx, |diff, cx| {
        let snapshot = buffer_2.read(cx).snapshot();
        assert_hunks(
            diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[(
                0..0,
                "// the-deleted-contents\n",
                "",
                DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
            )],
        );
    });
}
7307
// End-to-end test of staging individual hunks: the optimistic "pending"
// secondary status shown while an index write is in flight, the events
// emitted at each step, recovery when the index write fails, and two staging
// operations issued back-to-back. The event assertions below consume
// `diff_events` in order, so the sequence of operations must not be reordered.
#[gpui::test]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; the working copy deletes "zero" and
    // uppercases "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_and_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();
    let mut diff_events = cx.events(&uncommitted_diff);

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage a hunk. It appears as optimistically staged: the secondary hunk's
    // removal is pending until the index write completes.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_before(Point::new(2, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the range of the staged hunk.
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write to the index completes, it appears as staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // The diff emits a change event for the changed index text.
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Simulate a problem writing to the git index.
    fs.set_error_message_for_index_write(
        "/dir/.git".as_ref(),
        Some("failed to write git index".into()),
    );

    // Stage another hunk. It also shows the optimistic pending state at first.
    uncommitted_diff.update(cx, |diff, cx| {
        let range =
            snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_before(Point::new(4, 0));
        let hunks = diff
            .hunks_intersecting_range(range, &snapshot, cx)
            .collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks, &snapshot, true, cx);

        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });
    assert!(matches!(
        diff_events.next().await.unwrap(),
        BufferDiffEvent::HunksStagedOrUnstaged(_)
    ));
    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // When the write fails, the hunk returns to being unstaged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    let event = diff_events.next().await.unwrap();
    if let BufferDiffEvent::DiffChanged {
        changed_range: Some(changed_range),
    } = event
    {
        let changed_range = changed_range.to_point(&snapshot);
        assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
    } else {
        panic!("Unexpected event {event:?}");
    }

    // Allow writing to the git index to succeed again.
    fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);

    // Stage two hunks with separate operations.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
        diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
        diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
    });

    // Both staged hunks appear as pending.
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
            ],
        );
    });

    // Both staging operations take effect.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7647
// Regression test (pinned to the seeds that originally reproduced it):
// staging further hunks while the FS event for an earlier index write is
// still buffered must not lose or revert the optimistic staging state.
#[gpui::test(seeds(340, 472))]
async fn test_staging_hunks_with_delayed_fs_event(cx: &mut gpui::TestAppContext) {
    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // HEAD and index start out identical; the working copy deletes "zero" and
    // uppercases "two" and "four", producing three hunks.
    let committed_contents = r#"
        zero
        one
        two
        three
        four
        five
    "#
    .unindent();
    let file_contents = r#"
        one
        TWO
        three
        FOUR
        five
    "#
    .unindent();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        "/dir",
        json!({
            ".git": {},
            "file.txt": file_contents.clone()
        }),
    )
    .await;

    fs.set_head_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        "/dir/.git".as_ref(),
        &[("file.txt", committed_contents.clone())],
    );

    let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer("/dir/file.txt", cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    // The hunks are initially unstaged.
    uncommitted_diff.read_with(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(HasSecondaryHunk),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Pause IO events so that subsequent index writes complete without their
    // FS notifications being delivered yet.
    fs.pause_events();

    // Stage the first hunk; it immediately shows the pending state.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).next().unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Stage the second hunk *before* receiving the FS event for the first hunk.
    // The first hunk must remain in its pending state.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(1).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (
                    0..0,
                    "zero\n",
                    "",
                    DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
                ),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(SecondaryHunkRemovalPending),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(HasSecondaryHunk),
                ),
            ],
        );
    });

    // Process the FS event for staging the first hunk (second event is still pending).
    fs.flush_events(1);
    cx.run_until_parked();

    // Stage the third hunk before receiving the second FS event.
    uncommitted_diff.update(cx, |diff, cx| {
        let hunk = diff.hunks(&snapshot, cx).nth(2).unwrap();
        diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
    });

    // Wait for all remaining IO.
    cx.run_until_parked();
    fs.flush_events(fs.buffered_event_count());

    // Now all hunks are staged.
    cx.run_until_parked();
    uncommitted_diff.update(cx, |diff, cx| {
        assert_hunks(
            diff.hunks(&snapshot, cx),
            &snapshot,
            &diff.base_text_string().unwrap(),
            &[
                (0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
                (
                    1..2,
                    "two\n",
                    "TWO\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
                (
                    3..4,
                    "four\n",
                    "FOUR\n",
                    DiffHunkStatus::modified(NoSecondaryHunk),
                ),
            ],
        );
    });
}
7841
// Randomized stress test: applies a random sequence of stage/unstage
// operations (sometimes with the diff-recalculation task deprioritized, to
// provoke races between diff recalculation and index writes), then checks
// that every hunk settles into the state implied by the last operation
// performed on it.
#[gpui::test(iterations = 25)]
async fn test_staging_random_hunks(
    mut rng: StdRng,
    executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    let operations = env::var("OPERATIONS")
        .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
        .unwrap_or(20);

    // Try to induce races between diff recalculation and index writes.
    if rng.random_bool(0.5) {
        executor.deprioritize(*CALCULATE_DIFF_TASK);
    }

    use DiffHunkSecondaryStatus::*;
    init_test(cx);

    // Every fifth line is modified in the working copy, so 30 lines yield
    // exactly 6 hunks (asserted below).
    let committed_text = (0..30).map(|i| format!("line {i}\n")).collect::<String>();
    let index_text = committed_text.clone();
    let buffer_text = (0..30)
        .map(|i| match i % 5 {
            0 => format!("line {i} (modified)\n"),
            _ => format!("line {i}\n"),
        })
        .collect::<String>();

    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/dir"),
        json!({
            ".git": {},
            "file.txt": buffer_text.clone()
        }),
    )
    .await;
    fs.set_head_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", committed_text.clone())],
        "deadbeef",
    );
    fs.set_index_for_repo(
        path!("/dir/.git").as_ref(),
        &[("file.txt", index_text.clone())],
    );
    let repo = fs
        .open_repo(path!("/dir/.git").as_ref(), Some("git".as_ref()))
        .unwrap();

    let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/dir/file.txt"), cx)
        })
        .await
        .unwrap();
    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
    let uncommitted_diff = project
        .update(cx, |project, cx| {
            project.open_uncommitted_diff(buffer.clone(), cx)
        })
        .await
        .unwrap();

    let mut hunks =
        uncommitted_diff.update(cx, |diff, cx| diff.hunks(&snapshot, cx).collect::<Vec<_>>());
    assert_eq!(hunks.len(), 6);

    // Apply random stage/unstage operations, mirroring the expected pending
    // state into the local `hunks` copy as we go.
    for _i in 0..operations {
        let hunk_ix = rng.random_range(0..hunks.len());
        let hunk = &mut hunks[hunk_ix];
        let row = hunk.range.start.row;

        if hunk.status().has_secondary_hunk() {
            log::info!("staging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(true, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkRemovalPending;
        } else {
            log::info!("unstaging hunk at {row}");
            uncommitted_diff.update(cx, |diff, cx| {
                diff.stage_or_unstage_hunks(false, std::slice::from_ref(hunk), &snapshot, true, cx);
            });
            hunk.secondary_status = SecondaryHunkAdditionPending;
        }

        // Let background work interleave nondeterministically between operations.
        for _ in 0..rng.random_range(0..10) {
            log::info!("yielding");
            cx.executor().simulate_random_delay().await;
        }
    }

    cx.executor().run_until_parked();

    // Once everything is quiescent, each pending state must have resolved to
    // the corresponding settled state.
    for hunk in &mut hunks {
        if hunk.secondary_status == SecondaryHunkRemovalPending {
            hunk.secondary_status = NoSecondaryHunk;
        } else if hunk.secondary_status == SecondaryHunkAdditionPending {
            hunk.secondary_status = HasSecondaryHunk;
        }
    }

    log::info!(
        "index text:\n{}",
        repo.load_index_text(rel_path("file.txt").into())
            .await
            .unwrap()
    );

    uncommitted_diff.update(cx, |diff, cx| {
        let expected_hunks = hunks
            .iter()
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        let actual_hunks = diff
            .hunks(&snapshot, cx)
            .map(|hunk| (hunk.range.start.row, hunk.secondary_status))
            .collect::<Vec<_>>();
        assert_eq!(actual_hunks, expected_hunks);
    });
}
7964
7965#[gpui::test]
7966async fn test_single_file_diffs(cx: &mut gpui::TestAppContext) {
7967 init_test(cx);
7968
7969 let committed_contents = r#"
7970 fn main() {
7971 println!("hello from HEAD");
7972 }
7973 "#
7974 .unindent();
7975 let file_contents = r#"
7976 fn main() {
7977 println!("hello from the working copy");
7978 }
7979 "#
7980 .unindent();
7981
7982 let fs = FakeFs::new(cx.background_executor.clone());
7983 fs.insert_tree(
7984 "/dir",
7985 json!({
7986 ".git": {},
7987 "src": {
7988 "main.rs": file_contents,
7989 }
7990 }),
7991 )
7992 .await;
7993
7994 fs.set_head_for_repo(
7995 Path::new("/dir/.git"),
7996 &[("src/main.rs", committed_contents.clone())],
7997 "deadbeef",
7998 );
7999 fs.set_index_for_repo(
8000 Path::new("/dir/.git"),
8001 &[("src/main.rs", committed_contents.clone())],
8002 );
8003
8004 let project = Project::test(fs.clone(), ["/dir/src/main.rs".as_ref()], cx).await;
8005
8006 let buffer = project
8007 .update(cx, |project, cx| {
8008 project.open_local_buffer("/dir/src/main.rs", cx)
8009 })
8010 .await
8011 .unwrap();
8012 let uncommitted_diff = project
8013 .update(cx, |project, cx| {
8014 project.open_uncommitted_diff(buffer.clone(), cx)
8015 })
8016 .await
8017 .unwrap();
8018
8019 cx.run_until_parked();
8020 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
8021 let snapshot = buffer.read(cx).snapshot();
8022 assert_hunks(
8023 uncommitted_diff.hunks(&snapshot, cx),
8024 &snapshot,
8025 &uncommitted_diff.base_text_string().unwrap(),
8026 &[(
8027 1..2,
8028 " println!(\"hello from HEAD\");\n",
8029 " println!(\"hello from the working copy\");\n",
8030 DiffHunkStatus {
8031 kind: DiffHunkStatusKind::Modified,
8032 secondary: DiffHunkSecondaryStatus::HasSecondaryHunk,
8033 },
8034 )],
8035 );
8036 });
8037}
8038
8039#[gpui::test]
8040async fn test_repository_and_path_for_project_path(
8041 background_executor: BackgroundExecutor,
8042 cx: &mut gpui::TestAppContext,
8043) {
8044 init_test(cx);
8045 let fs = FakeFs::new(background_executor);
8046 fs.insert_tree(
8047 path!("/root"),
8048 json!({
8049 "c.txt": "",
8050 "dir1": {
8051 ".git": {},
8052 "deps": {
8053 "dep1": {
8054 ".git": {},
8055 "src": {
8056 "a.txt": ""
8057 }
8058 }
8059 },
8060 "src": {
8061 "b.txt": ""
8062 }
8063 },
8064 }),
8065 )
8066 .await;
8067
8068 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
8069 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8070 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8071 project
8072 .update(cx, |project, cx| project.git_scans_complete(cx))
8073 .await;
8074 cx.run_until_parked();
8075
8076 project.read_with(cx, |project, cx| {
8077 let git_store = project.git_store().read(cx);
8078 let pairs = [
8079 ("c.txt", None),
8080 ("dir1/src/b.txt", Some((path!("/root/dir1"), "src/b.txt"))),
8081 (
8082 "dir1/deps/dep1/src/a.txt",
8083 Some((path!("/root/dir1/deps/dep1"), "src/a.txt")),
8084 ),
8085 ];
8086 let expected = pairs
8087 .iter()
8088 .map(|(path, result)| {
8089 (
8090 path,
8091 result.map(|(repo, repo_path)| {
8092 (Path::new(repo).into(), RepoPath::new(repo_path).unwrap())
8093 }),
8094 )
8095 })
8096 .collect::<Vec<_>>();
8097 let actual = pairs
8098 .iter()
8099 .map(|(path, _)| {
8100 let project_path = (tree_id, rel_path(path)).into();
8101 let result = maybe!({
8102 let (repo, repo_path) =
8103 git_store.repository_and_path_for_project_path(&project_path, cx)?;
8104 Some((repo.read(cx).work_directory_abs_path.clone(), repo_path))
8105 });
8106 (path, result)
8107 })
8108 .collect::<Vec<_>>();
8109 pretty_assertions::assert_eq!(expected, actual);
8110 });
8111
8112 fs.remove_dir(path!("/root/dir1/.git").as_ref(), RemoveOptions::default())
8113 .await
8114 .unwrap();
8115 cx.run_until_parked();
8116
8117 project.read_with(cx, |project, cx| {
8118 let git_store = project.git_store().read(cx);
8119 assert_eq!(
8120 git_store.repository_and_path_for_project_path(
8121 &(tree_id, rel_path("dir1/src/b.txt")).into(),
8122 cx
8123 ),
8124 None
8125 );
8126 });
8127}
8128
8129#[gpui::test]
8130async fn test_home_dir_as_git_repository(cx: &mut gpui::TestAppContext) {
8131 init_test(cx);
8132 let fs = FakeFs::new(cx.background_executor.clone());
8133 let home = paths::home_dir();
8134 fs.insert_tree(
8135 home,
8136 json!({
8137 ".git": {},
8138 "project": {
8139 "a.txt": "A"
8140 },
8141 }),
8142 )
8143 .await;
8144
8145 let project = Project::test(fs.clone(), [home.join("project").as_ref()], cx).await;
8146 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8147 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8148
8149 project
8150 .update(cx, |project, cx| project.git_scans_complete(cx))
8151 .await;
8152 tree.flush_fs_events(cx).await;
8153
8154 project.read_with(cx, |project, cx| {
8155 let containing = project
8156 .git_store()
8157 .read(cx)
8158 .repository_and_path_for_project_path(&(tree_id, rel_path("a.txt")).into(), cx);
8159 assert!(containing.is_none());
8160 });
8161
8162 let project = Project::test(fs.clone(), [home.as_ref()], cx).await;
8163 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8164 let tree_id = tree.read_with(cx, |tree, _| tree.id());
8165 project
8166 .update(cx, |project, cx| project.git_scans_complete(cx))
8167 .await;
8168 tree.flush_fs_events(cx).await;
8169
8170 project.read_with(cx, |project, cx| {
8171 let containing = project
8172 .git_store()
8173 .read(cx)
8174 .repository_and_path_for_project_path(&(tree_id, rel_path("project/a.txt")).into(), cx);
8175 assert_eq!(
8176 containing
8177 .unwrap()
8178 .0
8179 .read(cx)
8180 .work_directory_abs_path
8181 .as_ref(),
8182 home,
8183 );
8184 });
8185}
8186
#[gpui::test]
async fn test_git_repository_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and real git, so blocking calls are expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a", // Modified
            "b.txt": "bb", // Added
            "c.txt": "ccc", // Unchanged
            "d.txt": "dddd", // Deleted
        },
    }));

    // Set up git repository before creating the project.
    let work_dir = root.path().join("project");
    let repo = git_init(work_dir.as_path());
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_add("d.txt", &repo);
    git_commit("Initial commit", &repo);
    // Produce the worktree-side changes announced above: delete d.txt and
    // modify a.txt (b.txt stays untracked, c.txt stays clean).
    std::fs::remove_file(work_dir.join("d.txt")).unwrap();
    std::fs::write(work_dir.join("a.txt"), "aa").unwrap();

    let project = Project::test(
        Arc::new(RealFs::new(None, cx.executor())),
        [root.path()],
        cx,
    )
    .await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Modify a previously clean, tracked file; its status should flip to modified.
    std::fs::write(work_dir.join("c.txt"), "some changes").unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _| {
        let entries = repository.cached_status().collect::<Vec<_>>();
        assert_eq!(
            entries,
            [
                StatusEntry {
                    repo_path: repo_path("a.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("b.txt"),
                    status: FileStatus::Untracked,
                },
                StatusEntry {
                    repo_path: repo_path("c.txt"),
                    status: StatusCode::Modified.worktree(),
                },
                StatusEntry {
                    repo_path: repo_path("d.txt"),
                    status: StatusCode::Deleted.worktree(),
                },
            ]
        );
    });

    // Commit all outstanding changes so the working tree is clean again.
    git_add("a.txt", &repo);
    git_add("c.txt", &repo);
    git_remove_index(Path::new("d.txt"), &repo);
    git_commit("Another commit", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Delete one tracked file (a.txt) and one untracked file (b.txt).
    std::fs::remove_file(work_dir.join("a.txt")).unwrap();
    std::fs::remove_file(work_dir.join("b.txt")).unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        let entries = repository.cached_status().collect::<Vec<_>>();

        // Deleting an untracked entry, b.txt, should leave no status
        // a.txt was tracked, and so should have a status
        assert_eq!(
            entries,
            [StatusEntry {
                repo_path: repo_path("a.txt"),
                status: StatusCode::Deleted.worktree(),
            }]
        );
    });
}
8316
8317#[gpui::test]
8318async fn test_git_status_postprocessing(cx: &mut gpui::TestAppContext) {
8319 init_test(cx);
8320 cx.executor().allow_parking();
8321
8322 let root = TempTree::new(json!({
8323 "project": {
8324 "sub": {},
8325 "a.txt": "",
8326 },
8327 }));
8328
8329 let work_dir = root.path().join("project");
8330 let repo = git_init(work_dir.as_path());
8331 // a.txt exists in HEAD and the working copy but is deleted in the index.
8332 git_add("a.txt", &repo);
8333 git_commit("Initial commit", &repo);
8334 git_remove_index("a.txt".as_ref(), &repo);
8335 // `sub` is a nested git repository.
8336 let _sub = git_init(&work_dir.join("sub"));
8337
8338 let project = Project::test(
8339 Arc::new(RealFs::new(None, cx.executor())),
8340 [root.path()],
8341 cx,
8342 )
8343 .await;
8344
8345 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8346 tree.flush_fs_events(cx).await;
8347 project
8348 .update(cx, |project, cx| project.git_scans_complete(cx))
8349 .await;
8350 cx.executor().run_until_parked();
8351
8352 let repository = project.read_with(cx, |project, cx| {
8353 project
8354 .repositories(cx)
8355 .values()
8356 .find(|repo| repo.read(cx).work_directory_abs_path.ends_with("project"))
8357 .unwrap()
8358 .clone()
8359 });
8360
8361 repository.read_with(cx, |repository, _cx| {
8362 let entries = repository.cached_status().collect::<Vec<_>>();
8363
8364 // `sub` doesn't appear in our computed statuses.
8365 // a.txt appears with a combined `DA` status.
8366 assert_eq!(
8367 entries,
8368 [StatusEntry {
8369 repo_path: repo_path("a.txt"),
8370 status: TrackedStatus {
8371 index_status: StatusCode::Deleted,
8372 worktree_status: StatusCode::Added
8373 }
8374 .into(),
8375 }]
8376 )
8377 });
8378}
8379
8380#[gpui::test]
8381async fn test_repository_subfolder_git_status(
8382 executor: gpui::BackgroundExecutor,
8383 cx: &mut gpui::TestAppContext,
8384) {
8385 init_test(cx);
8386
8387 let fs = FakeFs::new(executor);
8388 fs.insert_tree(
8389 path!("/root"),
8390 json!({
8391 "my-repo": {
8392 ".git": {},
8393 "a.txt": "a",
8394 "sub-folder-1": {
8395 "sub-folder-2": {
8396 "c.txt": "cc",
8397 "d": {
8398 "e.txt": "eee"
8399 }
8400 },
8401 }
8402 },
8403 }),
8404 )
8405 .await;
8406
8407 const C_TXT: &str = "sub-folder-1/sub-folder-2/c.txt";
8408 const E_TXT: &str = "sub-folder-1/sub-folder-2/d/e.txt";
8409
8410 fs.set_status_for_repo(
8411 path!("/root/my-repo/.git").as_ref(),
8412 &[(E_TXT, FileStatus::Untracked)],
8413 );
8414
8415 let project = Project::test(
8416 fs.clone(),
8417 [path!("/root/my-repo/sub-folder-1/sub-folder-2").as_ref()],
8418 cx,
8419 )
8420 .await;
8421
8422 project
8423 .update(cx, |project, cx| project.git_scans_complete(cx))
8424 .await;
8425 cx.run_until_parked();
8426
8427 let repository = project.read_with(cx, |project, cx| {
8428 project.repositories(cx).values().next().unwrap().clone()
8429 });
8430
8431 // Ensure that the git status is loaded correctly
8432 repository.read_with(cx, |repository, _cx| {
8433 assert_eq!(
8434 repository.work_directory_abs_path,
8435 Path::new(path!("/root/my-repo")).into()
8436 );
8437
8438 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8439 assert_eq!(
8440 repository
8441 .status_for_path(&repo_path(E_TXT))
8442 .unwrap()
8443 .status,
8444 FileStatus::Untracked
8445 );
8446 });
8447
8448 fs.set_status_for_repo(path!("/root/my-repo/.git").as_ref(), &[]);
8449 project
8450 .update(cx, |project, cx| project.git_scans_complete(cx))
8451 .await;
8452 cx.run_until_parked();
8453
8454 repository.read_with(cx, |repository, _cx| {
8455 assert_eq!(repository.status_for_path(&repo_path(C_TXT)), None);
8456 assert_eq!(repository.status_for_path(&repo_path(E_TXT)), None);
8457 });
8458}
8459
8460// TODO: this test is flaky (especially on Windows but at least sometimes on all platforms).
#[cfg(any())]
#[gpui::test]
async fn test_conflicted_cherry_pick(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and real git, so blocking calls are expected.
    cx.executor().allow_parking();

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
        },
    }));
    let root_path = root.path();

    let repo = git_init(&root_path.join("project"));
    git_add("a.txt", &repo);
    git_commit("init", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Commit a conflicting change to a.txt on a side branch, then cherry-pick
    // that commit back onto main so a.txt becomes CONFLICTED and
    // CHERRY_PICK_HEAD is created.
    git_branch("other-branch", &repo);
    git_checkout("refs/heads/other-branch", &repo);
    std::fs::write(root_path.join("project/a.txt"), "A").unwrap();
    git_add("a.txt", &repo);
    git_commit("capitalize", &repo);
    let commit = repo
        .head()
        .expect("Failed to get HEAD")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    git_checkout("refs/heads/main", &repo);
    std::fs::write(root_path.join("project/a.txt"), "b").unwrap();
    git_add("a.txt", &repo);
    git_commit("improve letter", &repo);
    git_cherry_pick(&commit, &repo);
    std::fs::read_to_string(root_path.join("project/.git/CHERRY_PICK_HEAD"))
        .expect("No CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(
        git_status(&repo),
        collections::HashMap::from_iter([("a.txt".to_owned(), git2::Status::CONFLICTED)])
    );
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();
    // The repository model should now report a.txt among its merge conflicts.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, [RepoPath::from("a.txt")]);

    git_add("a.txt", &repo);
    // Attempt to manually simulate what `git cherry-pick --continue` would do.
    git_commit("whatevs", &repo);
    std::fs::remove_file(root.path().join("project/.git/CHERRY_PICK_HEAD"))
        .expect("Failed to remove CHERRY_PICK_HEAD");
    pretty_assertions::assert_eq!(git_status(&repo), collections::HashMap::default());
    tree.flush_fs_events(cx).await;
    // With the cherry-pick concluded, the conflict set should be empty again.
    let conflicts = repository.update(cx, |repository, _| {
        repository
            .merge_conflicts
            .iter()
            .cloned()
            .collect::<Vec<_>>()
    });
    pretty_assertions::assert_eq!(conflicts, []);
}
8542
#[gpui::test]
async fn test_update_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".git": {},
            ".gitignore": "*.txt\n",
            "a.xml": "<a></a>",
            "b.txt": "Some text"
        }),
    )
    .await;

    // HEAD and index both track .gitignore and a.xml; b.txt starts out ignored.
    fs.set_head_and_index_for_repo(
        path!("/root/.git").as_ref(),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // One file is unmodified, the other is ignored.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, false);
        assert_entry_git_state(tree.read(cx), repository.read(cx), "b.txt", None, true);
    });

    // Change the gitignore, and stage the newly non-ignored file.
    fs.atomic_write(path!("/root/.gitignore").into(), "*.xml\n".into())
        .await
        .unwrap();
    fs.set_index_for_repo(
        Path::new(path!("/root/.git")),
        &[
            (".gitignore", "*.txt\n".into()),
            ("a.xml", "<a></a>".into()),
            ("b.txt", "Some text".into()),
        ],
    );

    cx.executor().run_until_parked();
    // Ignored-ness flips: a.xml is now ignored, b.txt is staged as added.
    cx.read(|cx| {
        assert_entry_git_state(tree.read(cx), repository.read(cx), "a.xml", None, true);
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "b.txt",
            Some(StatusCode::Added),
            false,
        );
    });
}
8610
8611// NOTE:
8612// This test always fails on Windows, because on Windows, unlike on Unix, you can't rename
8613// a directory which some program has already open.
8614// This is a limitation of the Windows.
8615// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8616// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and real git, so blocking calls are expected.
    cx.executor().allow_parking();
    let root = TempTree::new(json!({
        "projects": {
            "project1": {
                "a": "",
                "b": "",
            }
        },

    }));
    let root_path = root.path();

    // `a` is committed then modified in the working copy; `b` stays untracked.
    let repo = git_init(&root_path.join("projects/project1"));
    git_add("a", &repo);
    git_commit("init", &repo);
    std::fs::write(root_path.join("projects/project1/a"), "aa").unwrap();

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Initial state: work directory and both statuses observed correctly.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project1").as_path()
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("a"))
                .map(|entry| entry.status),
            Some(StatusCode::Modified.worktree()),
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path("b"))
                .map(|entry| entry.status),
            Some(FileStatus::Untracked),
        );
    });

    // Rename the repository's work directory on disk; the repository model
    // should follow the rename while keeping its statuses intact.
    std::fs::rename(
        root_path.join("projects/project1"),
        root_path.join("projects/project2"),
    )
    .unwrap();
    tree.flush_fs_events(cx).await;

    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("projects/project2").as_path()
        );
        assert_eq!(
            repository.status_for_path(&repo_path("a")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
        assert_eq!(
            repository.status_for_path(&repo_path("b")).unwrap().status,
            FileStatus::Untracked,
        );
    });
}
8692
8693// NOTE: This test always fails on Windows, because on Windows, unlike on Unix,
8694// you can't rename a directory which some program has already open. This is a
8695// limitation of the Windows. See:
8696// See: https://stackoverflow.com/questions/41365318/access-is-denied-when-renaming-folder
8697// See: https://learn.microsoft.com/en-us/windows-hardware/drivers/ddi/ntifs/ns-ntifs-_file_rename_information
#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)]
async fn test_file_status(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Uses the real filesystem and real git, so blocking calls are expected.
    cx.executor().allow_parking();
    const IGNORE_RULE: &str = "**/target";

    let root = TempTree::new(json!({
        "project": {
            "a.txt": "a",
            "b.txt": "bb",
            "c": {
                "d": {
                    "e.txt": "eee"
                }
            },
            "f.txt": "ffff",
            "target": {
                "build_file": "???"
            },
            ".gitignore": IGNORE_RULE
        },

    }));
    let root_path = root.path();

    const A_TXT: &str = "a.txt";
    const B_TXT: &str = "b.txt";
    const E_TXT: &str = "c/d/e.txt";
    const F_TXT: &str = "f.txt";
    const DOTGITIGNORE: &str = ".gitignore";
    const BUILD_FILE: &str = "target/build_file";

    // Set up git repository before creating the worktree.
    // a.txt, e.txt, and .gitignore are committed; b.txt and f.txt stay untracked.
    let work_dir = root.path().join("project");
    let mut repo = git_init(work_dir.as_path());
    repo.add_ignore_rule(IGNORE_RULE).unwrap();
    git_add(A_TXT, &repo);
    git_add(E_TXT, &repo);
    git_add(DOTGITIGNORE, &repo);
    git_commit("Initial commit", &repo);

    let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Check that the right git state is observed on startup
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository.work_directory_abs_path.as_ref(),
            root_path.join("project").as_path()
        );

        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // Modify a file in the working copy.
    std::fs::write(work_dir.join(A_TXT), "aa").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the file's git status has changed.
    repository.read_with(cx, |repository, _| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(A_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Create a commit in the git repository.
    git_add(A_TXT, &repo);
    git_add(B_TXT, &repo);
    git_commit("Committing modified and added", &repo);
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // The worktree detects that the files' git status have changed.
    // Committed files lose their status; f.txt remains untracked.
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(&repo_path(F_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(repository.status_for_path(&repo_path(B_TXT)), None);
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
    });

    // Modify files in the working copy and perform git operations on other files.
    git_reset(0, &repo);
    git_remove_index(Path::new(B_TXT), &repo);
    git_stash(&mut repo);
    std::fs::write(work_dir.join(E_TXT), "eeee").unwrap();
    std::fs::write(work_dir.join(BUILD_FILE), "this should be ignored").unwrap();
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    // Check that more complex repo changes are tracked
    repository.read_with(cx, |repository, _cx| {
        assert_eq!(repository.status_for_path(&repo_path(A_TXT)), None);
        assert_eq!(
            repository
                .status_for_path(&repo_path(B_TXT))
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
        assert_eq!(
            repository
                .status_for_path(&repo_path(E_TXT))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // Remove files/directories and extend the ignore rules, then commit the
    // new .gitignore.
    std::fs::remove_file(work_dir.join(B_TXT)).unwrap();
    std::fs::remove_dir_all(work_dir.join("c")).unwrap();
    std::fs::write(
        work_dir.join(DOTGITIGNORE),
        [IGNORE_RULE, "f.txt"].join("\n"),
    )
    .unwrap();

    git_add(Path::new(DOTGITIGNORE), &repo);
    git_commit("Committing modified git ignore", &repo);

    tree.flush_fs_events(cx).await;
    cx.executor().run_until_parked();

    // Create a new untracked file inside a fresh nested directory…
    let mut renamed_dir_name = "first_directory/second_directory";
    const RENAMED_FILE: &str = "rf.txt";

    std::fs::create_dir_all(work_dir.join(renamed_dir_name)).unwrap();
    std::fs::write(
        work_dir.join(renamed_dir_name).join(RENAMED_FILE),
        "new-contents",
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });

    // …then rename its top-level parent directory: the untracked status must
    // follow the file to its new path.
    renamed_dir_name = "new_first_directory/second_directory";

    std::fs::rename(
        work_dir.join("first_directory"),
        work_dir.join("new_first_directory"),
    )
    .unwrap();

    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    repository.read_with(cx, |repository, _cx| {
        assert_eq!(
            repository
                .status_for_path(
                    &rel_path(renamed_dir_name)
                        .join(rel_path(RENAMED_FILE))
                        .into()
                )
                .unwrap()
                .status,
            FileStatus::Untracked,
        );
    });
}
8921
8922#[gpui::test]
8923#[ignore]
8924async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
8925 init_test(cx);
8926 cx.executor().allow_parking();
8927
8928 const IGNORE_RULE: &str = "**/target";
8929
8930 let root = TempTree::new(json!({
8931 "project": {
8932 "src": {
8933 "main.rs": "fn main() {}"
8934 },
8935 "target": {
8936 "debug": {
8937 "important_text.txt": "important text",
8938 },
8939 },
8940 ".gitignore": IGNORE_RULE
8941 },
8942
8943 }));
8944 let root_path = root.path();
8945
8946 // Set up git repository before creating the worktree.
8947 let work_dir = root.path().join("project");
8948 let repo = git_init(work_dir.as_path());
8949 repo.add_ignore_rule(IGNORE_RULE).unwrap();
8950 git_add("src/main.rs", &repo);
8951 git_add(".gitignore", &repo);
8952 git_commit("Initial commit", &repo);
8953
8954 let project = Project::test(Arc::new(RealFs::new(None, cx.executor())), [root_path], cx).await;
8955 let repository_updates = Arc::new(Mutex::new(Vec::new()));
8956 let project_events = Arc::new(Mutex::new(Vec::new()));
8957 project.update(cx, |project, cx| {
8958 let repo_events = repository_updates.clone();
8959 cx.subscribe(project.git_store(), move |_, _, e, _| {
8960 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
8961 repo_events.lock().push(e.clone());
8962 }
8963 })
8964 .detach();
8965 let project_events = project_events.clone();
8966 cx.subscribe_self(move |_, e, _| {
8967 if let Event::WorktreeUpdatedEntries(_, updates) = e {
8968 project_events.lock().extend(
8969 updates
8970 .iter()
8971 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
8972 .filter(|(path, _)| path != "fs-event-sentinel"),
8973 );
8974 }
8975 })
8976 .detach();
8977 });
8978
8979 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
8980 tree.flush_fs_events(cx).await;
8981 tree.update(cx, |tree, cx| {
8982 tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
8983 })
8984 .await
8985 .unwrap();
8986 tree.update(cx, |tree, _| {
8987 assert_eq!(
8988 tree.entries(true, 0)
8989 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
8990 .collect::<Vec<_>>(),
8991 vec![
8992 (rel_path(""), false),
8993 (rel_path("project/"), false),
8994 (rel_path("project/.gitignore"), false),
8995 (rel_path("project/src"), false),
8996 (rel_path("project/src/main.rs"), false),
8997 (rel_path("project/target"), true),
8998 (rel_path("project/target/debug"), true),
8999 (rel_path("project/target/debug/important_text.txt"), true),
9000 ]
9001 );
9002 });
9003
9004 assert_eq!(
9005 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9006 vec![
9007 RepositoryEvent::StatusesChanged { full_scan: true },
9008 RepositoryEvent::MergeHeadsChanged,
9009 ],
9010 "Initial worktree scan should produce a repo update event"
9011 );
9012 assert_eq!(
9013 project_events.lock().drain(..).collect::<Vec<_>>(),
9014 vec![
9015 ("project/target".to_string(), PathChange::Loaded),
9016 ("project/target/debug".to_string(), PathChange::Loaded),
9017 (
9018 "project/target/debug/important_text.txt".to_string(),
9019 PathChange::Loaded
9020 ),
9021 ],
9022 "Initial project changes should show that all not-ignored and all opened files are loaded"
9023 );
9024
9025 let deps_dir = work_dir.join("target").join("debug").join("deps");
9026 std::fs::create_dir_all(&deps_dir).unwrap();
9027 tree.flush_fs_events(cx).await;
9028 project
9029 .update(cx, |project, cx| project.git_scans_complete(cx))
9030 .await;
9031 cx.executor().run_until_parked();
9032 std::fs::write(deps_dir.join("aa.tmp"), "something tmp").unwrap();
9033 tree.flush_fs_events(cx).await;
9034 project
9035 .update(cx, |project, cx| project.git_scans_complete(cx))
9036 .await;
9037 cx.executor().run_until_parked();
9038 std::fs::remove_dir_all(&deps_dir).unwrap();
9039 tree.flush_fs_events(cx).await;
9040 project
9041 .update(cx, |project, cx| project.git_scans_complete(cx))
9042 .await;
9043 cx.executor().run_until_parked();
9044
9045 tree.update(cx, |tree, _| {
9046 assert_eq!(
9047 tree.entries(true, 0)
9048 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9049 .collect::<Vec<_>>(),
9050 vec![
9051 (rel_path(""), false),
9052 (rel_path("project/"), false),
9053 (rel_path("project/.gitignore"), false),
9054 (rel_path("project/src"), false),
9055 (rel_path("project/src/main.rs"), false),
9056 (rel_path("project/target"), true),
9057 (rel_path("project/target/debug"), true),
9058 (rel_path("project/target/debug/important_text.txt"), true),
9059 ],
9060 "No stray temp files should be left after the flycheck changes"
9061 );
9062 });
9063
9064 assert_eq!(
9065 repository_updates
9066 .lock()
9067 .iter()
9068 .cloned()
9069 .collect::<Vec<_>>(),
9070 Vec::new(),
9071 "No further RepositoryUpdated events should happen, as only ignored dirs' contents was changed",
9072 );
9073 assert_eq!(
9074 project_events.lock().as_slice(),
9075 vec![
9076 ("project/target/debug/deps".to_string(), PathChange::Added),
9077 ("project/target/debug/deps".to_string(), PathChange::Removed),
9078 ],
9079 "Due to `debug` directory being tracket, it should get updates for entries inside it.
9080 No updates for more nested directories should happen as those are ignored",
9081 );
9082}
9083
9084#[gpui::test]
9085async fn test_odd_events_for_ignored_dirs(
9086 executor: BackgroundExecutor,
9087 cx: &mut gpui::TestAppContext,
9088) {
9089 init_test(cx);
9090 let fs = FakeFs::new(executor);
9091 fs.insert_tree(
9092 path!("/root"),
9093 json!({
9094 ".git": {},
9095 ".gitignore": "**/target/",
9096 "src": {
9097 "main.rs": "fn main() {}",
9098 },
9099 "target": {
9100 "debug": {
9101 "foo.txt": "foo",
9102 "deps": {}
9103 }
9104 }
9105 }),
9106 )
9107 .await;
9108 fs.set_head_and_index_for_repo(
9109 path!("/root/.git").as_ref(),
9110 &[
9111 (".gitignore", "**/target/".into()),
9112 ("src/main.rs", "fn main() {}".into()),
9113 ],
9114 );
9115
9116 let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
9117 let repository_updates = Arc::new(Mutex::new(Vec::new()));
9118 let project_events = Arc::new(Mutex::new(Vec::new()));
9119 project.update(cx, |project, cx| {
9120 let repository_updates = repository_updates.clone();
9121 cx.subscribe(project.git_store(), move |_, _, e, _| {
9122 if let GitStoreEvent::RepositoryUpdated(_, e, _) = e {
9123 repository_updates.lock().push(e.clone());
9124 }
9125 })
9126 .detach();
9127 let project_events = project_events.clone();
9128 cx.subscribe_self(move |_, e, _| {
9129 if let Event::WorktreeUpdatedEntries(_, updates) = e {
9130 project_events.lock().extend(
9131 updates
9132 .iter()
9133 .map(|(path, _, change)| (path.as_unix_str().to_string(), *change))
9134 .filter(|(path, _)| path != "fs-event-sentinel"),
9135 );
9136 }
9137 })
9138 .detach();
9139 });
9140
9141 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9142 tree.update(cx, |tree, cx| {
9143 tree.load_file(rel_path("target/debug/foo.txt"), cx)
9144 })
9145 .await
9146 .unwrap();
9147 tree.flush_fs_events(cx).await;
9148 project
9149 .update(cx, |project, cx| project.git_scans_complete(cx))
9150 .await;
9151 cx.run_until_parked();
9152 tree.update(cx, |tree, _| {
9153 assert_eq!(
9154 tree.entries(true, 0)
9155 .map(|entry| (entry.path.as_ref(), entry.is_ignored))
9156 .collect::<Vec<_>>(),
9157 vec![
9158 (rel_path(""), false),
9159 (rel_path(".gitignore"), false),
9160 (rel_path("src"), false),
9161 (rel_path("src/main.rs"), false),
9162 (rel_path("target"), true),
9163 (rel_path("target/debug"), true),
9164 (rel_path("target/debug/deps"), true),
9165 (rel_path("target/debug/foo.txt"), true),
9166 ]
9167 );
9168 });
9169
9170 assert_eq!(
9171 repository_updates.lock().drain(..).collect::<Vec<_>>(),
9172 vec![
9173 RepositoryEvent::MergeHeadsChanged,
9174 RepositoryEvent::BranchChanged,
9175 RepositoryEvent::StatusesChanged { full_scan: false },
9176 RepositoryEvent::StatusesChanged { full_scan: false },
9177 ],
9178 "Initial worktree scan should produce a repo update event"
9179 );
9180 assert_eq!(
9181 project_events.lock().drain(..).collect::<Vec<_>>(),
9182 vec![
9183 ("target".to_string(), PathChange::Loaded),
9184 ("target/debug".to_string(), PathChange::Loaded),
9185 ("target/debug/deps".to_string(), PathChange::Loaded),
9186 ("target/debug/foo.txt".to_string(), PathChange::Loaded),
9187 ],
9188 "All non-ignored entries and all opened firs should be getting a project event",
9189 );
9190
9191 // Emulate a flycheck spawn: it emits a `INODE_META_MOD`-flagged FS event on target/debug/deps, then creates and removes temp files inside.
9192 // This may happen multiple times during a single flycheck, but once is enough for testing.
9193 fs.emit_fs_event("/root/target/debug/deps", None);
9194 tree.flush_fs_events(cx).await;
9195 project
9196 .update(cx, |project, cx| project.git_scans_complete(cx))
9197 .await;
9198 cx.executor().run_until_parked();
9199
9200 assert_eq!(
9201 repository_updates
9202 .lock()
9203 .iter()
9204 .cloned()
9205 .collect::<Vec<_>>(),
9206 Vec::new(),
9207 "No further RepositoryUpdated events should happen, as only ignored dirs received FS events",
9208 );
9209 assert_eq!(
9210 project_events.lock().as_slice(),
9211 Vec::new(),
9212 "No further project events should happen, as only ignored dirs received FS events",
9213 );
9214}
9215
9216#[gpui::test]
9217async fn test_repos_in_invisible_worktrees(
9218 executor: BackgroundExecutor,
9219 cx: &mut gpui::TestAppContext,
9220) {
9221 init_test(cx);
9222 let fs = FakeFs::new(executor);
9223 fs.insert_tree(
9224 path!("/root"),
9225 json!({
9226 "dir1": {
9227 ".git": {},
9228 "dep1": {
9229 ".git": {},
9230 "src": {
9231 "a.txt": "",
9232 },
9233 },
9234 "b.txt": "",
9235 },
9236 }),
9237 )
9238 .await;
9239
9240 let project = Project::test(fs.clone(), [path!("/root/dir1/dep1").as_ref()], cx).await;
9241 let _visible_worktree =
9242 project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9243 project
9244 .update(cx, |project, cx| project.git_scans_complete(cx))
9245 .await;
9246
9247 let repos = project.read_with(cx, |project, cx| {
9248 project
9249 .repositories(cx)
9250 .values()
9251 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9252 .collect::<Vec<_>>()
9253 });
9254 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9255
9256 let (_invisible_worktree, _) = project
9257 .update(cx, |project, cx| {
9258 project.worktree_store.update(cx, |worktree_store, cx| {
9259 worktree_store.find_or_create_worktree(path!("/root/dir1/b.txt"), false, cx)
9260 })
9261 })
9262 .await
9263 .expect("failed to create worktree");
9264 project
9265 .update(cx, |project, cx| project.git_scans_complete(cx))
9266 .await;
9267
9268 let repos = project.read_with(cx, |project, cx| {
9269 project
9270 .repositories(cx)
9271 .values()
9272 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9273 .collect::<Vec<_>>()
9274 });
9275 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/dir1/dep1")).into()]);
9276}
9277
// Verifies that gitignored entries get correct git state on initial scan and on
// rescans triggered by file creation, for three cases: tracked files, files
// ignored by an ancestor worktree's `.gitignore`, and files inside an ignored dir.
#[gpui::test(iterations = 10)]
async fn test_rescan_with_gitignore(cx: &mut gpui::TestAppContext) {
    init_test(cx);
    // Clear file-scan exclusions so even the `.git` directory appears as a
    // worktree entry (asserted at the end of this test).
    cx.update(|cx| {
        cx.update_global::<SettingsStore, _>(|store, cx| {
            store.update_user_settings(cx, |settings| {
                settings.project.worktree.file_scan_exclusions = Some(Vec::new());
            });
        });
    });
    let fs = FakeFs::new(cx.background_executor.clone());
    fs.insert_tree(
        path!("/root"),
        json!({
            ".gitignore": "ancestor-ignored-file1\nancestor-ignored-file2\n",
            "tree": {
                ".git": {},
                ".gitignore": "ignored-dir\n",
                "tracked-dir": {
                    "tracked-file1": "",
                    "ancestor-ignored-file1": "",
                },
                "ignored-dir": {
                    "ignored-file1": ""
                }
            }
        }),
    )
    .await;
    // Seed HEAD and the index so `tracked-file1` starts out committed and unmodified.
    fs.set_head_and_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
        ],
    );

    let project = Project::test(fs.clone(), [path!("/root/tree").as_ref()], cx).await;

    let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
    tree.flush_fs_events(cx).await;
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.executor().run_until_parked();

    let repository = project.read_with(cx, |project, cx| {
        project.repositories(cx).values().next().unwrap().clone()
    });

    // Ignored directories are not scanned eagerly; force-load `ignored-dir`'s
    // entries so their state can be asserted below.
    tree.read_with(cx, |tree, _| {
        tree.as_local()
            .unwrap()
            .manually_refresh_entries_for_paths(vec![rel_path("ignored-dir").into()])
    })
    .recv()
    .await;

    // Initial scan: tracked file is unmodified, ignored files carry no status.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file1",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file1",
            None,
            true,
        );
    });

    // Create a second file of each kind and stage the tracked one, then let
    // the rescan settle.
    fs.create_file(
        path!("/root/tree/tracked-dir/tracked-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.set_index_for_repo(
        path!("/root/tree/.git").as_ref(),
        &[
            (".gitignore", "ignored-dir\n".into()),
            ("tracked-dir/tracked-file1", "".into()),
            ("tracked-dir/tracked-file2", "".into()),
        ],
    );
    fs.create_file(
        path!("/root/tree/tracked-dir/ancestor-ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();
    fs.create_file(
        path!("/root/tree/ignored-dir/ignored-file2").as_ref(),
        Default::default(),
    )
    .await
    .unwrap();

    cx.executor().run_until_parked();
    // After the rescan: the staged file shows as Added; the ignored files
    // still carry no status, and only the one inside `ignored-dir` is ignored.
    cx.read(|cx| {
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/tracked-file2",
            Some(StatusCode::Added),
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "tracked-dir/ancestor-ignored-file2",
            None,
            false,
        );
        assert_entry_git_state(
            tree.read(cx),
            repository.read(cx),
            "ignored-dir/ignored-file2",
            None,
            true,
        );
        // With file-scan exclusions cleared, `.git` itself is present and ignored.
        assert!(
            tree.read(cx)
                .entry_for_path(&rel_path(".git"))
                .unwrap()
                .is_ignored
        );
    });
}
9418
// Verifies that linked git worktrees (a `.git` *file* whose `gitdir:` points
// into `<repo>/.git/worktrees/…`) and submodules (`gitdir:` pointing into
// `<repo>/.git/modules/…`) are each detected as separate repositories, and
// that git state changes inside them are picked up and reflected in status.
#[gpui::test]
async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
    init_test(cx);

    let fs = FakeFs::new(cx.executor());
    fs.insert_tree(
        path!("/project"),
        json!({
            ".git": {
                "worktrees": {
                    "some-worktree": {
                        // Points the linked worktree back at the main `.git` dir.
                        "commondir": "../..\n",
                        // For is_git_dir
                        "HEAD": "",
                        "config": ""
                    }
                },
                "modules": {
                    "subdir": {
                        "some-submodule": {
                            // For is_git_dir
                            "HEAD": "",
                            "config": "",
                        }
                    }
                }
            },
            "src": {
                "a.txt": "A",
            },
            "some-worktree": {
                ".git": "gitdir: ../.git/worktrees/some-worktree\n",
                "src": {
                    "b.txt": "B",
                }
            },
            "subdir": {
                "some-submodule": {
                    ".git": "gitdir: ../../.git/modules/subdir/some-submodule\n",
                    "c.txt": "C",
                }
            }
        }),
    )
    .await;

    let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
    let scan_complete = project.update(cx, |project, cx| project.git_scans_complete(cx));
    scan_complete.await;

    // All three repositories — main, linked worktree, and submodule — should
    // be discovered, each with its own work directory.
    let mut repositories = project.update(cx, |project, cx| {
        project
            .repositories(cx)
            .values()
            .map(|repo| repo.read(cx).work_directory_abs_path.clone())
            .collect::<Vec<_>>()
    });
    repositories.sort();
    pretty_assertions::assert_eq!(
        repositories,
        [
            Path::new(path!("/project")).into(),
            Path::new(path!("/project/some-worktree")).into(),
            Path::new(path!("/project/subdir/some-submodule")).into(),
        ]
    );

    // Generate a git-related event for the worktree and check that it's refreshed.
    fs.with_git_state(
        path!("/project/some-worktree/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
            state
                .index_contents
                .insert(repo_path("src/b.txt"), "b".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/some-worktree/src/b.txt"), cx)
        })
        .await
        .unwrap();
    // The buffer must resolve to the linked worktree's repository, not the
    // main one; the barrier drains any in-flight repository jobs first.
    let (worktree_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/some-worktree")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    // On-disk content "B" differs from committed/staged "b" → modified in worktree.
    worktree_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("src/b.txt"))
                .unwrap()
                .status,
            StatusCode::Modified.worktree(),
        );
    });

    // The same for the submodule.
    fs.with_git_state(
        path!("/project/subdir/some-submodule/.git").as_ref(),
        true,
        |state| {
            state
                .head_contents
                .insert(repo_path("c.txt"), "c".to_owned());
            state
                .index_contents
                .insert(repo_path("c.txt"), "c".to_owned());
        },
    )
    .unwrap();
    cx.run_until_parked();

    let buffer = project
        .update(cx, |project, cx| {
            project.open_local_buffer(path!("/project/subdir/some-submodule/c.txt"), cx)
        })
        .await
        .unwrap();
    let (submodule_repo, barrier) = project.update(cx, |project, cx| {
        let (repo, _) = project
            .git_store()
            .read(cx)
            .repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
            .unwrap();
        pretty_assertions::assert_eq!(
            repo.read(cx).work_directory_abs_path,
            Path::new(path!("/project/subdir/some-submodule")).into(),
        );
        let barrier = repo.update(cx, |repo, _| repo.barrier());
        (repo.clone(), barrier)
    });
    barrier.await.unwrap();
    submodule_repo.update(cx, |repo, _| {
        pretty_assertions::assert_eq!(
            repo.status_for_path(&repo_path("c.txt")).unwrap().status,
            StatusCode::Modified.worktree(),
        );
    });
}
9574
9575#[gpui::test]
9576async fn test_repository_deduplication(cx: &mut gpui::TestAppContext) {
9577 init_test(cx);
9578 let fs = FakeFs::new(cx.background_executor.clone());
9579 fs.insert_tree(
9580 path!("/root"),
9581 json!({
9582 "project": {
9583 ".git": {},
9584 "child1": {
9585 "a.txt": "A",
9586 },
9587 "child2": {
9588 "b.txt": "B",
9589 }
9590 }
9591 }),
9592 )
9593 .await;
9594
9595 let project = Project::test(
9596 fs.clone(),
9597 [
9598 path!("/root/project/child1").as_ref(),
9599 path!("/root/project/child2").as_ref(),
9600 ],
9601 cx,
9602 )
9603 .await;
9604
9605 let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
9606 tree.flush_fs_events(cx).await;
9607 project
9608 .update(cx, |project, cx| project.git_scans_complete(cx))
9609 .await;
9610 cx.executor().run_until_parked();
9611
9612 let repos = project.read_with(cx, |project, cx| {
9613 project
9614 .repositories(cx)
9615 .values()
9616 .map(|repo| repo.read(cx).work_directory_abs_path.clone())
9617 .collect::<Vec<_>>()
9618 });
9619 pretty_assertions::assert_eq!(repos, [Path::new(path!("/root/project")).into()]);
9620}
9621
9622async fn search(
9623 project: &Entity<Project>,
9624 query: SearchQuery,
9625 cx: &mut gpui::TestAppContext,
9626) -> Result<HashMap<String, Vec<Range<usize>>>> {
9627 let search_rx = project.update(cx, |project, cx| project.search(query, cx));
9628 let mut results = HashMap::default();
9629 while let Ok(search_result) = search_rx.recv().await {
9630 match search_result {
9631 SearchResult::Buffer { buffer, ranges } => {
9632 results.entry(buffer).or_insert(ranges);
9633 }
9634 SearchResult::LimitReached => {}
9635 }
9636 }
9637 Ok(results
9638 .into_iter()
9639 .map(|(buffer, ranges)| {
9640 buffer.update(cx, |buffer, cx| {
9641 let path = buffer
9642 .file()
9643 .unwrap()
9644 .full_path(cx)
9645 .to_string_lossy()
9646 .to_string();
9647 let ranges = ranges
9648 .into_iter()
9649 .map(|range| range.to_offset(buffer))
9650 .collect::<Vec<_>>();
9651 (path, ranges)
9652 })
9653 })
9654 .collect())
9655}
9656
/// Shared test setup: installs a test `SettingsStore`, the release channel,
/// language support, and `Project` settings into a fresh app context.
pub fn init_test(cx: &mut gpui::TestAppContext) {
    zlog::init_test();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        release_channel::init(SemanticVersion::default(), cx);
        language::init(cx);
        Project::init_settings(cx);
    });
}
9668
9669fn json_lang() -> Arc<Language> {
9670 Arc::new(Language::new(
9671 LanguageConfig {
9672 name: "JSON".into(),
9673 matcher: LanguageMatcher {
9674 path_suffixes: vec!["json".to_string()],
9675 ..Default::default()
9676 },
9677 ..Default::default()
9678 },
9679 None,
9680 ))
9681}
9682
9683fn js_lang() -> Arc<Language> {
9684 Arc::new(Language::new(
9685 LanguageConfig {
9686 name: "JavaScript".into(),
9687 matcher: LanguageMatcher {
9688 path_suffixes: vec!["js".to_string()],
9689 ..Default::default()
9690 },
9691 ..Default::default()
9692 },
9693 None,
9694 ))
9695}
9696
9697fn rust_lang() -> Arc<Language> {
9698 Arc::new(Language::new(
9699 LanguageConfig {
9700 name: "Rust".into(),
9701 matcher: LanguageMatcher {
9702 path_suffixes: vec!["rs".to_string()],
9703 ..Default::default()
9704 },
9705 ..Default::default()
9706 },
9707 Some(tree_sitter_rust::LANGUAGE.into()),
9708 ))
9709}
9710
/// Builds a "Python" language stub (no grammar) whose toolchain lister reports
/// any `.venv` directory found in an ancestor of the queried path as a
/// virtual-environment toolchain, backed by the given fake filesystem.
fn python_lang(fs: Arc<FakeFs>) -> Arc<Language> {
    // Fake lister: scans the (fake) filesystem for `.venv` directories instead
    // of probing real Python interpreters.
    struct PythonMootToolchainLister(Arc<FakeFs>);
    #[async_trait]
    impl ToolchainLister for PythonMootToolchainLister {
        async fn list(
            &self,
            worktree_root: PathBuf,
            subroot_relative_path: Arc<RelPath>,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> ToolchainList {
            // This lister will always return a path .venv directories within ancestors
            let ancestors = subroot_relative_path.ancestors().collect::<Vec<_>>();
            let mut toolchains = vec![];
            for ancestor in ancestors {
                let venv_path = worktree_root.join(ancestor.as_std_path()).join(".venv");
                if self.0.is_dir(&venv_path).await {
                    toolchains.push(Toolchain {
                        name: SharedString::new("Python Venv"),
                        path: venv_path.to_string_lossy().into_owned().into(),
                        language_name: LanguageName(SharedString::new_static("Python")),
                        as_json: serde_json::Value::Null,
                    })
                }
            }
            ToolchainList {
                toolchains,
                ..Default::default()
            }
        }
        // Resolution is unsupported in this test double.
        async fn resolve(
            &self,
            _: PathBuf,
            _: Option<HashMap<String, String>>,
            _: &dyn Fs,
        ) -> anyhow::Result<Toolchain> {
            Err(anyhow::anyhow!("Not implemented"))
        }
        fn meta(&self) -> ToolchainMetadata {
            ToolchainMetadata {
                term: SharedString::new_static("Virtual Environment"),
                new_toolchain_placeholder: SharedString::new_static(
                    "A path to the python3 executable within a virtual environment, or path to virtual environment itself",
                ),
                manifest_name: ManifestName::from(SharedString::new_static("pyproject.toml")),
            }
        }
        // No shell activation commands are needed for the fake toolchain.
        fn activation_script(&self, _: &Toolchain, _: ShellKind) -> Vec<String> {
            vec![]
        }
    }
    Arc::new(
        Language::new(
            LanguageConfig {
                name: "Python".into(),
                matcher: LanguageMatcher {
                    path_suffixes: vec!["py".to_string()],
                    ..Default::default()
                },
                ..Default::default()
            },
            None, // We're not testing Python parsing with this language.
        )
        .with_manifest(Some(ManifestName::from(SharedString::new_static(
            "pyproject.toml",
        ))))
        .with_toolchain_lister(Some(Arc::new(PythonMootToolchainLister(fs)))),
    )
}
9780
9781fn typescript_lang() -> Arc<Language> {
9782 Arc::new(Language::new(
9783 LanguageConfig {
9784 name: "TypeScript".into(),
9785 matcher: LanguageMatcher {
9786 path_suffixes: vec!["ts".to_string()],
9787 ..Default::default()
9788 },
9789 ..Default::default()
9790 },
9791 Some(tree_sitter_typescript::LANGUAGE_TYPESCRIPT.into()),
9792 ))
9793}
9794
9795fn tsx_lang() -> Arc<Language> {
9796 Arc::new(Language::new(
9797 LanguageConfig {
9798 name: "tsx".into(),
9799 matcher: LanguageMatcher {
9800 path_suffixes: vec!["tsx".to_string()],
9801 ..Default::default()
9802 },
9803 ..Default::default()
9804 },
9805 Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
9806 ))
9807}
9808
9809fn get_all_tasks(
9810 project: &Entity<Project>,
9811 task_contexts: Arc<TaskContexts>,
9812 cx: &mut App,
9813) -> Task<Vec<(TaskSourceKind, ResolvedTask)>> {
9814 let new_tasks = project.update(cx, |project, cx| {
9815 project.task_store.update(cx, |task_store, cx| {
9816 task_store.task_inventory().unwrap().update(cx, |this, cx| {
9817 this.used_and_current_resolved_tasks(task_contexts, cx)
9818 })
9819 })
9820 });
9821
9822 cx.background_spawn(async move {
9823 let (mut old, new) = new_tasks.await;
9824 old.extend(new);
9825 old
9826 })
9827}
9828
9829#[track_caller]
9830fn assert_entry_git_state(
9831 tree: &Worktree,
9832 repository: &Repository,
9833 path: &str,
9834 index_status: Option<StatusCode>,
9835 is_ignored: bool,
9836) {
9837 assert_eq!(tree.abs_path(), repository.work_directory_abs_path);
9838 let entry = tree
9839 .entry_for_path(&rel_path(path))
9840 .unwrap_or_else(|| panic!("entry {path} not found"));
9841 let status = repository
9842 .status_for_path(&repo_path(path))
9843 .map(|entry| entry.status);
9844 let expected = index_status.map(|index_status| {
9845 TrackedStatus {
9846 index_status,
9847 worktree_status: StatusCode::Unmodified,
9848 }
9849 .into()
9850 });
9851 assert_eq!(
9852 status, expected,
9853 "expected {path} to have git status: {expected:?}"
9854 );
9855 assert_eq!(
9856 entry.is_ignored, is_ignored,
9857 "expected {path} to have is_ignored: {is_ignored}"
9858 );
9859}
9860
9861#[track_caller]
9862fn git_init(path: &Path) -> git2::Repository {
9863 let mut init_opts = RepositoryInitOptions::new();
9864 init_opts.initial_head("main");
9865 git2::Repository::init_opts(path, &init_opts).expect("Failed to initialize git repository")
9866}
9867
9868#[track_caller]
9869fn git_add<P: AsRef<Path>>(path: P, repo: &git2::Repository) {
9870 let path = path.as_ref();
9871 let mut index = repo.index().expect("Failed to get index");
9872 index.add_path(path).expect("Failed to add file");
9873 index.write().expect("Failed to write index");
9874}
9875
9876#[track_caller]
9877fn git_remove_index(path: &Path, repo: &git2::Repository) {
9878 let mut index = repo.index().expect("Failed to get index");
9879 index.remove_path(path).expect("Failed to add file");
9880 index.write().expect("Failed to write index");
9881}
9882
9883#[track_caller]
9884fn git_commit(msg: &'static str, repo: &git2::Repository) {
9885 use git2::Signature;
9886
9887 let signature = Signature::now("test", "test@zed.dev").unwrap();
9888 let oid = repo.index().unwrap().write_tree().unwrap();
9889 let tree = repo.find_tree(oid).unwrap();
9890 if let Ok(head) = repo.head() {
9891 let parent_obj = head.peel(git2::ObjectType::Commit).unwrap();
9892
9893 let parent_commit = parent_obj.as_commit().unwrap();
9894
9895 repo.commit(
9896 Some("HEAD"),
9897 &signature,
9898 &signature,
9899 msg,
9900 &tree,
9901 &[parent_commit],
9902 )
9903 .expect("Failed to commit with parent");
9904 } else {
9905 repo.commit(Some("HEAD"), &signature, &signature, msg, &tree, &[])
9906 .expect("Failed to commit");
9907 }
9908}
9909
// `#[cfg(any())]` is never true, so this helper is currently compiled out;
// it is kept around for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_cherry_pick(commit: &git2::Commit<'_>, repo: &git2::Repository) {
    repo.cherrypick(commit, None).expect("Failed to cherrypick");
}
9915
9916#[track_caller]
9917fn git_stash(repo: &mut git2::Repository) {
9918 use git2::Signature;
9919
9920 let signature = Signature::now("test", "test@zed.dev").unwrap();
9921 repo.stash_save(&signature, "N/A", None)
9922 .expect("Failed to stash");
9923}
9924
9925#[track_caller]
9926fn git_reset(offset: usize, repo: &git2::Repository) {
9927 let head = repo.head().expect("Couldn't get repo head");
9928 let object = head.peel(git2::ObjectType::Commit).unwrap();
9929 let commit = object.as_commit().unwrap();
9930 let new_head = commit
9931 .parents()
9932 .inspect(|parnet| {
9933 parnet.message();
9934 })
9935 .nth(offset)
9936 .expect("Not enough history");
9937 repo.reset(new_head.as_object(), git2::ResetType::Soft, None)
9938 .expect("Could not reset");
9939}
9940
// `#[cfg(any())]` is never true, so this helper is currently compiled out;
// it is kept around for tests that may re-enable it.
/// Creates branch `name` pointing at the current HEAD commit.
#[cfg(any())]
#[track_caller]
fn git_branch(name: &str, repo: &git2::Repository) {
    let head = repo
        .head()
        .expect("Couldn't get repo head")
        .peel_to_commit()
        .expect("HEAD is not a commit");
    // Fixed panic message: it previously said "Failed to commit", a
    // copy-paste from `git_commit`, which made failures misleading.
    repo.branch(name, &head, false)
        .expect("Failed to create branch");
}
9951
// `#[cfg(any())]` is never true, so this helper is currently compiled out;
// it is kept around for tests that may re-enable it.
#[cfg(any())]
#[track_caller]
fn git_checkout(name: &str, repo: &git2::Repository) {
    repo.set_head(name).expect("Failed to set head");
    repo.checkout_head(None).expect("Failed to check out head");
}
9958
// `#[cfg(any())]` is never true, so this helper is currently compiled out;
// it is kept around for tests that may re-enable it.
/// Collects the repository's status entries into a path → status map.
#[cfg(any())]
#[track_caller]
fn git_status(repo: &git2::Repository) -> collections::HashMap<String, git2::Status> {
    repo.statuses(None)
        .unwrap()
        .iter()
        .map(|entry| (entry.path().unwrap().to_string(), entry.status()))
        .collect()
}
9968
#[gpui::test]
async fn test_find_project_path_abs(
    background_executor: BackgroundExecutor,
    cx: &mut gpui::TestAppContext,
) {
    // find_project_path should work with absolute paths
    init_test(cx);

    let fs = FakeFs::new(background_executor);
    fs.insert_tree(
        path!("/root"),
        json!({
            "project1": {
                "file1.txt": "content1",
                "subdir": {
                    "file2.txt": "content2"
                }
            },
            "project2": {
                "file3.txt": "content3"
            }
        }),
    )
    .await;

    // Open both directories as separate worktrees of one project.
    let project = Project::test(
        fs.clone(),
        [
            path!("/root/project1").as_ref(),
            path!("/root/project2").as_ref(),
        ],
        cx,
    )
    .await;

    // Make sure the worktrees are fully initialized
    project
        .update(cx, |project, cx| project.git_scans_complete(cx))
        .await;
    cx.run_until_parked();

    // Capture each worktree's absolute root path and id for the assertions below.
    let (project1_abs_path, project1_id, project2_abs_path, project2_id) =
        project.read_with(cx, |project, cx| {
            let worktrees: Vec<_> = project.worktrees(cx).collect();
            let abs_path1 = worktrees[0].read(cx).abs_path().to_path_buf();
            let id1 = worktrees[0].read(cx).id();
            let abs_path2 = worktrees[1].read(cx).abs_path().to_path_buf();
            let id2 = worktrees[1].read(cx).id();
            (abs_path1, id1, abs_path2, id2)
        });

    project.update(cx, |project, cx| {
        // Absolute paths inside each worktree resolve to that worktree's id
        // plus the path relative to its root.
        let abs_path = project1_abs_path.join("file1.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("file1.txt"));

        let abs_path = project1_abs_path.join("subdir").join("file2.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project1_id);
        assert_eq!(&*found_path.path, rel_path("subdir/file2.txt"));

        let abs_path = project2_abs_path.join("file3.txt");
        let found_path = project.find_project_path(abs_path, cx).unwrap();
        assert_eq!(found_path.worktree_id, project2_id);
        assert_eq!(&*found_path.path, rel_path("file3.txt"));

        // Resolution is purely path-based: a file need not exist on disk as
        // long as it lies inside a worktree.
        let abs_path = project1_abs_path.join("nonexistent.txt");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_some(),
            "Should find project path for nonexistent file in worktree"
        );

        // Test with an absolute path outside any worktree
        let abs_path = Path::new("/some/other/path");
        let found_path = project.find_project_path(abs_path, cx);
        assert!(
            found_path.is_none(),
            "Should not find project path for path outside any worktree"
        );
    });
}